FFmpeg: cmdutils.c
1 /*
2  * Various utilities for command line tools
3  * Copyright (c) 2000-2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include <string.h>
23 #include <stdint.h>
24 #include <stdlib.h>
25 #include <errno.h>
26 #include <math.h>
27 
28 /* Include only the enabled headers since some compilers (namely, Sun
29  Studio) will not omit unused inline functions and create undefined
30  references to libraries that are not being built. */
31 
32 #include "config.h"
33 #include "compat/va_copy.h"
34 #include "libavformat/avformat.h"
35 #include "libavfilter/avfilter.h"
36 #include "libavdevice/avdevice.h"
37 #include "libavresample/avresample.h"
38 #include "libswscale/swscale.h"
39 #include "libswresample/swresample.h"
40 #include "libpostproc/postprocess.h"
41 #include "libavutil/attributes.h"
42 #include "libavutil/avassert.h"
43 #include "libavutil/avstring.h"
44 #include "libavutil/bprint.h"
45 #include "libavutil/display.h"
46 #include "libavutil/mathematics.h"
47 #include "libavutil/imgutils.h"
48 #include "libavutil/libm.h"
49 #include "libavutil/parseutils.h"
50 #include "libavutil/pixdesc.h"
51 #include "libavutil/eval.h"
52 #include "libavutil/dict.h"
53 #include "libavutil/opt.h"
54 #include "libavutil/cpu.h"
55 #include "libavutil/ffversion.h"
56 #include "libavutil/version.h"
57 #include "cmdutils.h"
58 #if HAVE_SYS_RESOURCE_H
59 #include <sys/time.h>
60 #include <sys/resource.h>
61 #endif
62 #ifdef _WIN32
63 #include <windows.h>
64 #endif
65 
66 static int init_report(const char *env);
67 
68 AVDictionary *sws_dict;
69 AVDictionary *swr_opts;
70 AVDictionary *format_opts, *codec_opts, *resample_opts;
71 
72 static FILE *report_file;
73 static int report_file_level = AV_LOG_DEBUG;
74 int hide_banner = 0;
75 
76 enum show_muxdemuxers {
77  SHOW_DEFAULT,
78  SHOW_DEMUXERS,
79  SHOW_MUXERS,
80 };
81 
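/* Set defaults for the global option dictionaries declared above; currently
 * this only selects bicubic scaling as the default sws flag. finish_group()
 * re-runs this after each input/output file so every file starts from a
 * clean set of per-file options. */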
82 void init_opts(void)
83 {
84  av_dict_set(&sws_dict, "flags", "bicubic", 0);
85 }
86 
87 void uninit_opts(void)
88 {
89  av_dict_free(&swr_opts);
90  av_dict_free(&sws_dict);
91  av_dict_free(&format_opts);
92  av_dict_free(&codec_opts);
93  av_dict_free(&resample_opts);
94 }
95 
96 void log_callback_help(void *ptr, int level, const char *fmt, va_list vl)
97 {
98  vfprintf(stdout, fmt, vl);
99 }
100 
101 static void log_callback_report(void *ptr, int level, const char *fmt, va_list vl)
102 {
103  va_list vl2;
104  char line[1024];
105  static int print_prefix = 1;
106 
107  va_copy(vl2, vl);
108  av_log_default_callback(ptr, level, fmt, vl);
109  av_log_format_line(ptr, level, fmt, vl2, line, sizeof(line), &print_prefix);
110  va_end(vl2);
111  if (report_file_level >= level) {
112  fputs(line, report_file);
113  fflush(report_file);
114  }
115 }
116 
117 void init_dynload(void)
118 {
119 #if HAVE_SETDLLDIRECTORY && defined(_WIN32)
120  /* Calling SetDllDirectory with the empty string (but not NULL) removes the
121  * current working directory from the DLL search path as a security precaution. */
122  SetDllDirectory("");
123 #endif
124 }
125 
126 static void (*program_exit)(int ret);
127 
128 void register_exit(void (*cb)(int ret))
129 {
130  program_exit = cb;
131 }
132 
133 void exit_program(int ret)
134 {
135  if (program_exit)
136  program_exit(ret);
137 
138  exit(ret);
139 }
140 
141 double parse_number_or_die(const char *context, const char *numstr, int type,
142  double min, double max)
143 {
144  char *tail;
145  const char *error;
146  double d = av_strtod(numstr, &tail);
147  if (*tail)
148  error = "Expected number for %s but found: %s\n";
149  else if (d < min || d > max)
150  error = "The value for %s was %s which is not within %f - %f\n";
151  else if (type == OPT_INT64 && (int64_t)d != d)
152  error = "Expected int64 for %s but found %s\n";
153  else if (type == OPT_INT && (int)d != d)
154  error = "Expected int for %s but found %s\n";
155  else
156  return d;
157  av_log(NULL, AV_LOG_FATAL, error, context, numstr, min, max);
158  exit_program(1);
159  return 0;
160 }
161 
162 int64_t parse_time_or_die(const char *context, const char *timestr,
163  int is_duration)
164 {
165  int64_t us;
166  if (av_parse_time(&us, timestr, is_duration) < 0) {
167  av_log(NULL, AV_LOG_FATAL, "Invalid %s specification for %s: %s\n",
168  is_duration ? "duration" : "date", context, timestr);
169  exit_program(1);
170  }
171  return us;
172 }
173 
174 void show_help_options(const OptionDef *options, const char *msg, int req_flags,
175  int rej_flags, int alt_flags)
176 {
177  const OptionDef *po;
178  int first;
179 
180  first = 1;
181  for (po = options; po->name; po++) {
182  char buf[128];
183 
184  if (((po->flags & req_flags) != req_flags) ||
185  (alt_flags && !(po->flags & alt_flags)) ||
186  (po->flags & rej_flags))
187  continue;
188 
189  if (first) {
190  printf("%s\n", msg);
191  first = 0;
192  }
193  av_strlcpy(buf, po->name, sizeof(buf));
194  if (po->argname) {
195  av_strlcat(buf, " ", sizeof(buf));
196  av_strlcat(buf, po->argname, sizeof(buf));
197  }
198  printf("-%-17s %s\n", buf, po->help);
199  }
200  printf("\n");
201 }
202 
203 void show_help_children(const AVClass *class, int flags)
204 {
205  void *iter = NULL;
206  const AVClass *child;
207  if (class->option) {
208  av_opt_show2(&class, NULL, flags, 0);
209  printf("\n");
210  }
211 
212  while (child = av_opt_child_class_iterate(class, &iter))
213  show_help_children(child, flags);
214 }
215 
216 static const OptionDef *find_option(const OptionDef *po, const char *name)
217 {
218  while (po->name) {
219  const char *end;
220  if (av_strstart(name, po->name, &end) && (!*end || *end == ':'))
221  break;
222  po++;
223  }
224  return po;
225 }
226 
227 /* _WIN32 means using the windows libc - cygwin doesn't define that
228  * by default. HAVE_COMMANDLINETOARGVW is true on cygwin, while
229  * it doesn't provide the actual command line via GetCommandLineW(). */
230 #if HAVE_COMMANDLINETOARGVW && defined(_WIN32)
231 #include <shellapi.h>
232 /* Will be leaked on exit */
233 static char** win32_argv_utf8 = NULL;
234 static int win32_argc = 0;
235 
236 /**
237  * Prepare command line arguments for executable.
238  * For Windows - perform wide-char to UTF-8 conversion.
239  * Input arguments should be main() function arguments.
240  * @param argc_ptr Arguments number (including executable)
241  * @param argv_ptr Arguments list.
242  */
243 static void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
244 {
245  char *argstr_flat;
246  wchar_t **argv_w;
247  int i, buffsize = 0, offset = 0;
248 
249  if (win32_argv_utf8) {
250  *argc_ptr = win32_argc;
251  *argv_ptr = win32_argv_utf8;
252  return;
253  }
254 
255  win32_argc = 0;
256  argv_w = CommandLineToArgvW(GetCommandLineW(), &win32_argc);
257  if (win32_argc <= 0 || !argv_w)
258  return;
259 
260  /* determine the UTF-8 buffer size (including NULL-termination symbols) */
261  for (i = 0; i < win32_argc; i++)
262  buffsize += WideCharToMultiByte(CP_UTF8, 0, argv_w[i], -1,
263  NULL, 0, NULL, NULL);
264 
265  win32_argv_utf8 = av_mallocz(sizeof(char *) * (win32_argc + 1) + buffsize);
266  argstr_flat = (char *)win32_argv_utf8 + sizeof(char *) * (win32_argc + 1);
267  if (!win32_argv_utf8) {
268  LocalFree(argv_w);
269  return;
270  }
271 
272  for (i = 0; i < win32_argc; i++) {
273  win32_argv_utf8[i] = &argstr_flat[offset];
274  offset += WideCharToMultiByte(CP_UTF8, 0, argv_w[i], -1,
275  &argstr_flat[offset],
276  buffsize - offset, NULL, NULL);
277  }
278  win32_argv_utf8[i] = NULL;
279  LocalFree(argv_w);
280 
281  *argc_ptr = win32_argc;
282  *argv_ptr = win32_argv_utf8;
283 }
284 #else
285 static inline void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
286 {
287  /* nothing to do */
288 }
289 #endif /* HAVE_COMMANDLINETOARGVW */
290 
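/* Apply one parsed option value to its destination: a field inside the
 * per-file options context (OPT_OFFSET/OPT_SPEC), a global variable, or a
 * callback in u.func_arg. Returns 0 on success or a negative AVERROR code. */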
291 static int write_option(void *optctx, const OptionDef *po, const char *opt,
292  const char *arg)
293 {
294  /* new-style options contain an offset into optctx, old-style address of
295  * a global var*/
296  void *dst = po->flags & (OPT_OFFSET | OPT_SPEC) ?
297  (uint8_t *)optctx + po->u.off : po->u.dst_ptr;
298  int *dstcount;
299 
300  if (po->flags & OPT_SPEC) {
301  SpecifierOpt **so = dst;
302  char *p = strchr(opt, ':');
303  char *str;
304 
305  dstcount = (int *)(so + 1);
306  *so = grow_array(*so, sizeof(**so), dstcount, *dstcount + 1);
307  str = av_strdup(p ? p + 1 : "");
308  if (!str)
309  return AVERROR(ENOMEM);
310  (*so)[*dstcount - 1].specifier = str;
311  dst = &(*so)[*dstcount - 1].u;
312  }
313 
314  if (po->flags & OPT_STRING) {
315  char *str;
316  str = av_strdup(arg);
317  av_freep(dst);
318  if (!str)
319  return AVERROR(ENOMEM);
320  *(char **)dst = str;
321  } else if (po->flags & OPT_BOOL || po->flags & OPT_INT) {
322  *(int *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
323  } else if (po->flags & OPT_INT64) {
324  *(int64_t *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT64_MIN, INT64_MAX);
325  } else if (po->flags & OPT_TIME) {
326  *(int64_t *)dst = parse_time_or_die(opt, arg, 1);
327  } else if (po->flags & OPT_FLOAT) {
328  *(float *)dst = parse_number_or_die(opt, arg, OPT_FLOAT, -INFINITY, INFINITY);
329  } else if (po->flags & OPT_DOUBLE) {
330  *(double *)dst = parse_number_or_die(opt, arg, OPT_DOUBLE, -INFINITY, INFINITY);
331  } else if (po->u.func_arg) {
332  int ret = po->u.func_arg(optctx, opt, arg);
333  if (ret < 0) {
335  "Failed to set value '%s' for option '%s': %s\n",
336  arg, opt, av_err2str(ret));
337  return ret;
338  }
339  }
340  if (po->flags & OPT_EXIT)
341  exit_program(0);
342 
343  return 0;
344 }
345 
346 int parse_option(void *optctx, const char *opt, const char *arg,
347  const OptionDef *options)
348 {
349  const OptionDef *po;
350  int ret;
351 
352  po = find_option(options, opt);
353  if (!po->name && opt[0] == 'n' && opt[1] == 'o') {
354  /* handle 'no' bool option */
355  po = find_option(options, opt + 2);
356  if ((po->name && (po->flags & OPT_BOOL)))
357  arg = "0";
358  } else if (po->flags & OPT_BOOL)
359  arg = "1";
360 
361  if (!po->name)
362  po = find_option(options, "default");
363  if (!po->name) {
364  av_log(NULL, AV_LOG_ERROR, "Unrecognized option '%s'\n", opt);
365  return AVERROR(EINVAL);
366  }
367  if (po->flags & HAS_ARG && !arg) {
368  av_log(NULL, AV_LOG_ERROR, "Missing argument for option '%s'\n", opt);
369  return AVERROR(EINVAL);
370  }
371 
372  ret = write_option(optctx, po, opt, arg);
373  if (ret < 0)
374  return ret;
375 
376  return !!(po->flags & HAS_ARG);
377 }
378 
379 void parse_options(void *optctx, int argc, char **argv, const OptionDef *options,
380  void (*parse_arg_function)(void *, const char*))
381 {
382  const char *opt;
383  int optindex, handleoptions = 1, ret;
384 
385  /* perform system-dependent conversions for arguments list */
386  prepare_app_arguments(&argc, &argv);
387 
388  /* parse options */
389  optindex = 1;
390  while (optindex < argc) {
391  opt = argv[optindex++];
392 
393  if (handleoptions && opt[0] == '-' && opt[1] != '\0') {
394  if (opt[1] == '-' && opt[2] == '\0') {
395  handleoptions = 0;
396  continue;
397  }
398  opt++;
399 
400  if ((ret = parse_option(optctx, opt, argv[optindex], options)) < 0)
401  exit_program(1);
402  optindex += ret;
403  } else {
404  if (parse_arg_function)
405  parse_arg_function(optctx, opt);
406  }
407  }
408 }
409 
410 int parse_optgroup(void *optctx, OptionGroup *g)
411 {
412  int i, ret;
413 
414  av_log(NULL, AV_LOG_DEBUG, "Parsing a group of options: %s %s.\n",
415  g->group_def->name, g->arg);
416 
417  for (i = 0; i < g->nb_opts; i++) {
418  Option *o = &g->opts[i];
419 
420  if (g->group_def->flags &&
421  !(g->group_def->flags & o->opt->flags)) {
422  av_log(NULL, AV_LOG_ERROR, "Option %s (%s) cannot be applied to "
423  "%s %s -- you are trying to apply an input option to an "
424  "output file or vice versa. Move this option before the "
425  "file it belongs to.\n", o->key, o->opt->help,
426  g->group_def->name, g->arg);
427  return AVERROR(EINVAL);
428  }
429 
430  av_log(NULL, AV_LOG_DEBUG, "Applying option %s (%s) with argument %s.\n",
431  o->key, o->opt->help, o->val);
432 
433  ret = write_option(optctx, o->opt, o->key, o->val);
434  if (ret < 0)
435  return ret;
436  }
437 
438  av_log(NULL, AV_LOG_DEBUG, "Successfully parsed a group of options.\n");
439 
440  return 0;
441 }
442 
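/* Search argv for a given option without consuming or applying anything.
 * Returns the argv index where it was found, or 0 if it is not present;
 * used for options that must be handled before regular parsing starts. */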
443 int locate_option(int argc, char **argv, const OptionDef *options,
444  const char *optname)
445 {
446  const OptionDef *po;
447  int i;
448 
449  for (i = 1; i < argc; i++) {
450  const char *cur_opt = argv[i];
451 
452  if (*cur_opt++ != '-')
453  continue;
454 
455  po = find_option(options, cur_opt);
456  if (!po->name && cur_opt[0] == 'n' && cur_opt[1] == 'o')
457  po = find_option(options, cur_opt + 2);
458 
459  if ((!po->name && !strcmp(cur_opt, optname)) ||
460  (po->name && !strcmp(optname, po->name)))
461  return i;
462 
463  if (!po->name || po->flags & HAS_ARG)
464  i++;
465  }
466  return 0;
467 }
468 
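/* Write a single command-line argument to the report file, adding shell-style
 * quoting and escaping when it contains characters outside a small safe set. */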
469 static void dump_argument(const char *a)
470 {
471  const unsigned char *p;
472 
473  for (p = a; *p; p++)
474  if (!((*p >= '+' && *p <= ':') || (*p >= '@' && *p <= 'Z') ||
475  *p == '_' || (*p >= 'a' && *p <= 'z')))
476  break;
477  if (!*p) {
478  fputs(a, report_file);
479  return;
480  }
481  fputc('"', report_file);
482  for (p = a; *p; p++) {
483  if (*p == '\\' || *p == '"' || *p == '$' || *p == '`')
484  fprintf(report_file, "\\%c", *p);
485  else if (*p < ' ' || *p > '~')
486  fprintf(report_file, "\\x%02x", *p);
487  else
488  fputc(*p, report_file);
489  }
490  fputc('"', report_file);
491 }
492 
493 static void check_options(const OptionDef *po)
494 {
495  while (po->name) {
496  if (po->flags & OPT_PERFILE)
497  av_assert0(po->flags & (OPT_INPUT | OPT_OUTPUT));
498  po++;
499  }
500 }
501 
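/* Pre-scan the command line for -loglevel/-v, -report and -hide_banner so that
 * logging is configured before any other option is parsed; the FFREPORT
 * environment variable is honoured here as well. */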
502 void parse_loglevel(int argc, char **argv, const OptionDef *options)
503 {
504  int idx = locate_option(argc, argv, options, "loglevel");
505  const char *env;
506 
507  check_options(options);
508 
509  if (!idx)
510  idx = locate_option(argc, argv, options, "v");
511  if (idx && argv[idx + 1])
512  opt_loglevel(NULL, "loglevel", argv[idx + 1]);
513  idx = locate_option(argc, argv, options, "report");
514  if ((env = getenv("FFREPORT")) || idx) {
515  init_report(env);
516  if (report_file) {
517  int i;
518  fprintf(report_file, "Command line:\n");
519  for (i = 0; i < argc; i++) {
520  dump_argument(argv[i]);
521  fputc(i < argc - 1 ? ' ' : '\n', report_file);
522  }
523  fflush(report_file);
524  }
525  }
526  idx = locate_option(argc, argv, options, "hide_banner");
527  if (idx)
528  hide_banner = 1;
529 }
530 
531 static const AVOption *opt_find(void *obj, const char *name, const char *unit,
532  int opt_flags, int search_flags)
533 {
534  const AVOption *o = av_opt_find(obj, name, unit, opt_flags, search_flags);
535  if(o && !o->flags)
536  return NULL;
537  return o;
538 }
539 
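/* Fallback for options that are not in the OptionDef table: try to match them
 * against the AVOption classes of the codec, (de)muxer, swscale, swresample
 * and avresample layers and record them in the corresponding dictionaries.
 * Returns 0 if the option was consumed, AVERROR_OPTION_NOT_FOUND otherwise. */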
540 #define FLAGS (o->type == AV_OPT_TYPE_FLAGS && (arg[0]=='-' || arg[0]=='+')) ? AV_DICT_APPEND : 0
541 int opt_default(void *optctx, const char *opt, const char *arg)
542 {
543  const AVOption *o;
544  int consumed = 0;
545  char opt_stripped[128];
546  const char *p;
547  const AVClass *cc = avcodec_get_class(), *fc = avformat_get_class();
548 #if CONFIG_AVRESAMPLE
549  const AVClass *rc = avresample_get_class();
550 #endif
551 #if CONFIG_SWSCALE
552  const AVClass *sc = sws_get_class();
553 #endif
554 #if CONFIG_SWRESAMPLE
555  const AVClass *swr_class = swr_get_class();
556 #endif
557 
558  if (!strcmp(opt, "debug") || !strcmp(opt, "fdebug"))
559  av_log_set_level(AV_LOG_DEBUG);
560 
561  if (!(p = strchr(opt, ':')))
562  p = opt + strlen(opt);
563  av_strlcpy(opt_stripped, opt, FFMIN(sizeof(opt_stripped), p - opt + 1));
564 
565  if ((o = opt_find(&cc, opt_stripped, NULL, 0,
566  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ)) ||
567  ((opt[0] == 'v' || opt[0] == 'a' || opt[0] == 's') &&
568  (o = opt_find(&cc, opt + 1, NULL, 0, AV_OPT_SEARCH_FAKE_OBJ)))) {
569  av_dict_set(&codec_opts, opt, arg, FLAGS);
570  consumed = 1;
571  }
572  if ((o = opt_find(&fc, opt, NULL, 0,
573  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
574  av_dict_set(&format_opts, opt, arg, FLAGS);
575  if (consumed)
576  av_log(NULL, AV_LOG_VERBOSE, "Routing option %s to both codec and muxer layer\n", opt);
577  consumed = 1;
578  }
579 #if CONFIG_SWSCALE
580  if (!consumed && (o = opt_find(&sc, opt, NULL, 0,
581  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
582  struct SwsContext *sws = sws_alloc_context();
583  int ret = av_opt_set(sws, opt, arg, 0);
584  sws_freeContext(sws);
585  if (!strcmp(opt, "srcw") || !strcmp(opt, "srch") ||
586  !strcmp(opt, "dstw") || !strcmp(opt, "dsth") ||
587  !strcmp(opt, "src_format") || !strcmp(opt, "dst_format")) {
588  av_log(NULL, AV_LOG_ERROR, "Directly using swscale dimensions/format options is not supported, please use the -s or -pix_fmt options\n");
589  return AVERROR(EINVAL);
590  }
591  if (ret < 0) {
592  av_log(NULL, AV_LOG_ERROR, "Error setting option %s.\n", opt);
593  return ret;
594  }
595 
596  av_dict_set(&sws_dict, opt, arg, FLAGS);
597 
598  consumed = 1;
599  }
600 #else
601  if (!consumed && !strcmp(opt, "sws_flags")) {
602  av_log(NULL, AV_LOG_WARNING, "Ignoring %s %s, due to disabled swscale\n", opt, arg);
603  consumed = 1;
604  }
605 #endif
606 #if CONFIG_SWRESAMPLE
607  if (!consumed && (o=opt_find(&swr_class, opt, NULL, 0,
608  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
609  struct SwrContext *swr = swr_alloc();
610  int ret = av_opt_set(swr, opt, arg, 0);
611  swr_free(&swr);
612  if (ret < 0) {
613  av_log(NULL, AV_LOG_ERROR, "Error setting option %s.\n", opt);
614  return ret;
615  }
616  av_dict_set(&swr_opts, opt, arg, FLAGS);
617  consumed = 1;
618  }
619 #endif
620 #if CONFIG_AVRESAMPLE
621  if ((o=opt_find(&rc, opt, NULL, 0,
622  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
623  av_dict_set(&resample_opts, opt, arg, FLAGS);
624  consumed = 1;
625  }
626 #endif
627 
628  if (consumed)
629  return 0;
630  return AVERROR_OPTION_NOT_FOUND;
631 }
632 
633 /*
634  * Check whether given option is a group separator.
635  *
636  * @return index of the group definition that matched or -1 if none
637  */
638 static int match_group_separator(const OptionGroupDef *groups, int nb_groups,
639  const char *opt)
640 {
641  int i;
642 
643  for (i = 0; i < nb_groups; i++) {
644  const OptionGroupDef *p = &groups[i];
645  if (p->sep && !strcmp(p->sep, opt))
646  return i;
647  }
648 
649  return -1;
650 }
651 
652 /*
653  * Finish parsing an option group.
654  *
655  * @param group_idx which group definition should this group belong to
656  * @param arg argument of the group delimiting option
657  */
658 static void finish_group(OptionParseContext *octx, int group_idx,
659  const char *arg)
660 {
661  OptionGroupList *l = &octx->groups[group_idx];
662  OptionGroup *g;
663 
664  GROW_ARRAY(l->groups, l->nb_groups);
665  g = &l->groups[l->nb_groups - 1];
666 
667  *g = octx->cur_group;
668  g->arg = arg;
669  g->group_def = l->group_def;
670  g->sws_dict = sws_dict;
671  g->swr_opts = swr_opts;
672  g->codec_opts = codec_opts;
673  g->format_opts = format_opts;
674  g->resample_opts = resample_opts;
675 
676  codec_opts = NULL;
677  format_opts = NULL;
678  resample_opts = NULL;
679  sws_dict = NULL;
680  swr_opts = NULL;
681  init_opts();
682 
683  memset(&octx->cur_group, 0, sizeof(octx->cur_group));
684 }
685 
686 /*
687  * Add an option instance to currently parsed group.
688  */
689 static void add_opt(OptionParseContext *octx, const OptionDef *opt,
690  const char *key, const char *val)
691 {
692  int global = !(opt->flags & (OPT_PERFILE | OPT_SPEC | OPT_OFFSET));
693  OptionGroup *g = global ? &octx->global_opts : &octx->cur_group;
694 
695  GROW_ARRAY(g->opts, g->nb_opts);
696  g->opts[g->nb_opts - 1].opt = opt;
697  g->opts[g->nb_opts - 1].key = key;
698  g->opts[g->nb_opts - 1].val = val;
699 }
700 
700 
701 void init_parse_context(OptionParseContext *octx,
702  const OptionGroupDef *groups, int nb_groups)
703 {
704  static const OptionGroupDef global_group = { "global" };
705  int i;
706 
707  memset(octx, 0, sizeof(*octx));
708 
709  octx->nb_groups = nb_groups;
710  octx->groups = av_mallocz_array(octx->nb_groups, sizeof(*octx->groups));
711  if (!octx->groups)
712  exit_program(1);
713 
714  for (i = 0; i < octx->nb_groups; i++)
715  octx->groups[i].group_def = &groups[i];
716 
717  octx->global_opts.group_def = &global_group;
718  octx->global_opts.arg = "";
719 
720  init_opts();
721 }
722 
723 void uninit_parse_context(OptionParseContext *octx)
724 {
725  int i, j;
726 
727  for (i = 0; i < octx->nb_groups; i++) {
728  OptionGroupList *l = &octx->groups[i];
729 
730  for (j = 0; j < l->nb_groups; j++) {
731  av_freep(&l->groups[j].opts);
732  av_dict_free(&l->groups[j].codec_opts);
733  av_dict_free(&l->groups[j].format_opts);
734  av_dict_free(&l->groups[j].resample_opts);
735 
736  av_dict_free(&l->groups[j].sws_dict);
737  av_dict_free(&l->groups[j].swr_opts);
738  }
739  av_freep(&l->groups);
740  }
741  av_freep(&octx->groups);
742 
743  av_freep(&octx->cur_group.opts);
744  av_freep(&octx->global_opts.opts);
745 
746  uninit_opts();
747 }
748 
749 int split_commandline(OptionParseContext *octx, int argc, char *argv[],
750  const OptionDef *options,
751  const OptionGroupDef *groups, int nb_groups)
752 {
753  int optindex = 1;
754  int dashdash = -2;
755 
756  /* perform system-dependent conversions for arguments list */
757  prepare_app_arguments(&argc, &argv);
758 
759  init_parse_context(octx, groups, nb_groups);
760  av_log(NULL, AV_LOG_DEBUG, "Splitting the commandline.\n");
761 
762  while (optindex < argc) {
763  const char *opt = argv[optindex++], *arg;
764  const OptionDef *po;
765  int ret;
766 
767  av_log(NULL, AV_LOG_DEBUG, "Reading option '%s' ...", opt);
768 
769  if (opt[0] == '-' && opt[1] == '-' && !opt[2]) {
770  dashdash = optindex;
771  continue;
772  }
773  /* unnamed group separators, e.g. output filename */
774  if (opt[0] != '-' || !opt[1] || dashdash+1 == optindex) {
775  finish_group(octx, 0, opt);
776  av_log(NULL, AV_LOG_DEBUG, " matched as %s.\n", groups[0].name);
777  continue;
778  }
779  opt++;
780 
781 #define GET_ARG(arg) \
782 do { \
783  arg = argv[optindex++]; \
784  if (!arg) { \
785  av_log(NULL, AV_LOG_ERROR, "Missing argument for option '%s'.\n", opt);\
786  return AVERROR(EINVAL); \
787  } \
788 } while (0)
789 
790  /* named group separators, e.g. -i */
791  if ((ret = match_group_separator(groups, nb_groups, opt)) >= 0) {
792  GET_ARG(arg);
793  finish_group(octx, ret, arg);
794  av_log(NULL, AV_LOG_DEBUG, " matched as %s with argument '%s'.\n",
795  groups[ret].name, arg);
796  continue;
797  }
798 
799  /* normal options */
800  po = find_option(options, opt);
801  if (po->name) {
802  if (po->flags & OPT_EXIT) {
803  /* optional argument, e.g. -h */
804  arg = argv[optindex++];
805  } else if (po->flags & HAS_ARG) {
806  GET_ARG(arg);
807  } else {
808  arg = "1";
809  }
810 
811  add_opt(octx, po, opt, arg);
812  av_log(NULL, AV_LOG_DEBUG, " matched as option '%s' (%s) with "
813  "argument '%s'.\n", po->name, po->help, arg);
814  continue;
815  }
816 
817  /* AVOptions */
818  if (argv[optindex]) {
819  ret = opt_default(NULL, opt, argv[optindex]);
820  if (ret >= 0) {
821  av_log(NULL, AV_LOG_DEBUG, " matched as AVOption '%s' with "
822  "argument '%s'.\n", opt, argv[optindex]);
823  optindex++;
824  continue;
825  } else if (ret != AVERROR_OPTION_NOT_FOUND) {
826  av_log(NULL, AV_LOG_ERROR, "Error parsing option '%s' "
827  "with argument '%s'.\n", opt, argv[optindex]);
828  return ret;
829  }
830  }
831 
832  /* boolean -nofoo options */
833  if (opt[0] == 'n' && opt[1] == 'o' &&
834  (po = find_option(options, opt + 2)) &&
835  po->name && po->flags & OPT_BOOL) {
836  add_opt(octx, po, opt, "0");
837  av_log(NULL, AV_LOG_DEBUG, " matched as option '%s' (%s) with "
838  "argument 0.\n", po->name, po->help);
839  continue;
840  }
841 
842  av_log(NULL, AV_LOG_ERROR, "Unrecognized option '%s'.\n", opt);
843  return AVERROR_OPTION_NOT_FOUND;
844  }
845 
846  if (octx->cur_group.nb_opts || codec_opts || format_opts || resample_opts)
847  av_log(NULL, AV_LOG_WARNING, "Trailing option(s) found in the "
848  "command: may be ignored.\n");
849 
850  av_log(NULL, AV_LOG_DEBUG, "Finished splitting the commandline.\n");
851 
852  return 0;
853 }
854 
855 int opt_cpuflags(void *optctx, const char *opt, const char *arg)
856 {
857  int ret;
858  unsigned flags = av_get_cpu_flags();
859 
860  if ((ret = av_parse_cpu_caps(&flags, arg)) < 0)
861  return ret;
862 
863  av_force_cpu_flags(flags);
864  return 0;
865 }
866 
867 int opt_loglevel(void *optctx, const char *opt, const char *arg)
868 {
869  const struct { const char *name; int level; } log_levels[] = {
870  { "quiet" , AV_LOG_QUIET },
871  { "panic" , AV_LOG_PANIC },
872  { "fatal" , AV_LOG_FATAL },
873  { "error" , AV_LOG_ERROR },
874  { "warning", AV_LOG_WARNING },
875  { "info" , AV_LOG_INFO },
876  { "verbose", AV_LOG_VERBOSE },
877  { "debug" , AV_LOG_DEBUG },
878  { "trace" , AV_LOG_TRACE },
879  };
880  const char *token;
881  char *tail;
882  int flags = av_log_get_flags();
883  int level = av_log_get_level();
884  int cmd, i = 0;
885 
886  av_assert0(arg);
887  while (*arg) {
888  token = arg;
889  if (*token == '+' || *token == '-') {
890  cmd = *token++;
891  } else {
892  cmd = 0;
893  }
894  if (!i && !cmd) {
895  flags = 0; /* missing relative prefix, build absolute value */
896  }
897  if (!strncmp(token, "repeat", 6)) {
898  if (cmd == '-') {
899  flags |= AV_LOG_SKIP_REPEATED;
900  } else {
901  flags &= ~AV_LOG_SKIP_REPEATED;
902  }
903  arg = token + 6;
904  } else if (!strncmp(token, "level", 5)) {
905  if (cmd == '-') {
906  flags &= ~AV_LOG_PRINT_LEVEL;
907  } else {
908  flags |= AV_LOG_PRINT_LEVEL;
909  }
910  arg = token + 5;
911  } else {
912  break;
913  }
914  i++;
915  }
916  if (!*arg) {
917  goto end;
918  } else if (*arg == '+') {
919  arg++;
920  } else if (!i) {
921  flags = av_log_get_flags(); /* level value without prefix, reset flags */
922  }
923 
924  for (i = 0; i < FF_ARRAY_ELEMS(log_levels); i++) {
925  if (!strcmp(log_levels[i].name, arg)) {
926  level = log_levels[i].level;
927  goto end;
928  }
929  }
930 
931  level = strtol(arg, &tail, 10);
932  if (*tail) {
933  av_log(NULL, AV_LOG_FATAL, "Invalid loglevel \"%s\". "
934  "Possible levels are numbers or:\n", arg);
935  for (i = 0; i < FF_ARRAY_ELEMS(log_levels); i++)
936  av_log(NULL, AV_LOG_FATAL, "\"%s\"\n", log_levels[i].name);
937  exit_program(1);
938  }
939 
940 end:
941  av_log_set_flags(flags);
942  av_log_set_level(level);
943  return 0;
944 }
945 
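/* Expand %p (program name), %t (timestamp of the run) and %% in the report
 * file name template into the AVBPrint buffer. */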
946 static void expand_filename_template(AVBPrint *bp, const char *template,
947  struct tm *tm)
948 {
949  int c;
950 
951  while ((c = *(template++))) {
952  if (c == '%') {
953  if (!(c = *(template++)))
954  break;
955  switch (c) {
956  case 'p':
957  av_bprintf(bp, "%s", program_name);
958  break;
959  case 't':
960  av_bprintf(bp, "%04d%02d%02d-%02d%02d%02d",
961  tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday,
962  tm->tm_hour, tm->tm_min, tm->tm_sec);
963  break;
964  case '%':
965  av_bprint_chars(bp, c, 1);
966  break;
967  }
968  } else {
969  av_bprint_chars(bp, c, 1);
970  }
971  }
972 }
973 
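/* Create the report file and install log_callback_report. The FFREPORT
 * environment variable is parsed as a ':'-separated list of key=value pairs;
 * the recognized keys are "file" and "level". A typical invocation, as a
 * shell sketch, would be:
 *   FFREPORT=file=ffreport-%p-%t.log:level=32 ffmpeg -i input.mkv output.mp4
 */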
974 static int init_report(const char *env)
975 {
976  char *filename_template = NULL;
977  char *key, *val;
978  int ret, count = 0;
979  int prog_loglevel, envlevel = 0;
980  time_t now;
981  struct tm *tm;
982  AVBPrint filename;
983 
984  if (report_file) /* already opened */
985  return 0;
986  time(&now);
987  tm = localtime(&now);
988 
989  while (env && *env) {
990  if ((ret = av_opt_get_key_value(&env, "=", ":", 0, &key, &val)) < 0) {
991  if (count)
993  "Failed to parse FFREPORT environment variable: %s\n",
994  av_err2str(ret));
995  break;
996  }
997  if (*env)
998  env++;
999  count++;
1000  if (!strcmp(key, "file")) {
1001  av_free(filename_template);
1002  filename_template = val;
1003  val = NULL;
1004  } else if (!strcmp(key, "level")) {
1005  char *tail;
1006  report_file_level = strtol(val, &tail, 10);
1007  if (*tail) {
1008  av_log(NULL, AV_LOG_FATAL, "Invalid report file level\n");
1009  exit_program(1);
1010  }
1011  envlevel = 1;
1012  } else {
1013  av_log(NULL, AV_LOG_ERROR, "Unknown key '%s' in FFREPORT\n", key);
1014  }
1015  av_free(val);
1016  av_free(key);
1017  }
1018 
1019  av_bprint_init(&filename, 0, AV_BPRINT_SIZE_AUTOMATIC);
1020  expand_filename_template(&filename,
1021  av_x_if_null(filename_template, "%p-%t.log"), tm);
1022  av_free(filename_template);
1023  if (!av_bprint_is_complete(&filename)) {
1024  av_log(NULL, AV_LOG_ERROR, "Out of memory building report file name\n");
1025  return AVERROR(ENOMEM);
1026  }
1027 
1028  prog_loglevel = av_log_get_level();
1029  if (!envlevel)
1030  report_file_level = FFMAX(report_file_level, prog_loglevel);
1031 
1032  report_file = fopen(filename.str, "w");
1033  if (!report_file) {
1034  int ret = AVERROR(errno);
1035  av_log(NULL, AV_LOG_ERROR, "Failed to open report \"%s\": %s\n",
1036  filename.str, strerror(errno));
1037  return ret;
1038  }
1041  "%s started on %04d-%02d-%02d at %02d:%02d:%02d\n"
1042  "Report written to \"%s\"\n"
1043  "Log level: %d\n",
1044  program_name,
1045  tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday,
1046  tm->tm_hour, tm->tm_min, tm->tm_sec,
1047  filename.str, report_file_level);
1048  av_bprint_finalize(&filename, NULL);
1049  return 0;
1050 }
1051 
1052 int opt_report(void *optctx, const char *opt, const char *arg)
1053 {
1054  return init_report(NULL);
1055 }
1056 
1057 int opt_max_alloc(void *optctx, const char *opt, const char *arg)
1058 {
1059  char *tail;
1060  size_t max;
1061 
1062  max = strtol(arg, &tail, 10);
1063  if (*tail) {
1064  av_log(NULL, AV_LOG_FATAL, "Invalid max_alloc \"%s\".\n", arg);
1065  exit_program(1);
1066  }
1067  av_max_alloc(max);
1068  return 0;
1069 }
1070 
1071 int opt_timelimit(void *optctx, const char *opt, const char *arg)
1072 {
1073 #if HAVE_SETRLIMIT
1074  int lim = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
1075  struct rlimit rl = { lim, lim + 1 };
1076  if (setrlimit(RLIMIT_CPU, &rl))
1077  perror("setrlimit");
1078 #else
1079  av_log(NULL, AV_LOG_WARNING, "-%s not implemented on this OS\n", opt);
1080 #endif
1081  return 0;
1082 }
1083 
1084 void print_error(const char *filename, int err)
1085 {
1086  char errbuf[128];
1087  const char *errbuf_ptr = errbuf;
1088 
1089  if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
1090  errbuf_ptr = strerror(AVUNERROR(err));
1091  av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, errbuf_ptr);
1092 }
1093 
1094 static int warned_cfg = 0;
1095 
1096 #define INDENT 1
1097 #define SHOW_VERSION 2
1098 #define SHOW_CONFIG 4
1099 #define SHOW_COPYRIGHT 8
1100 
1101 #define PRINT_LIB_INFO(libname, LIBNAME, flags, level) \
1102  if (CONFIG_##LIBNAME) { \
1103  const char *indent = flags & INDENT? " " : ""; \
1104  if (flags & SHOW_VERSION) { \
1105  unsigned int version = libname##_version(); \
1106  av_log(NULL, level, \
1107  "%slib%-11s %2d.%3d.%3d / %2d.%3d.%3d\n", \
1108  indent, #libname, \
1109  LIB##LIBNAME##_VERSION_MAJOR, \
1110  LIB##LIBNAME##_VERSION_MINOR, \
1111  LIB##LIBNAME##_VERSION_MICRO, \
1112  AV_VERSION_MAJOR(version), AV_VERSION_MINOR(version),\
1113  AV_VERSION_MICRO(version)); \
1114  } \
1115  if (flags & SHOW_CONFIG) { \
1116  const char *cfg = libname##_configuration(); \
1117  if (strcmp(FFMPEG_CONFIGURATION, cfg)) { \
1118  if (!warned_cfg) { \
1119  av_log(NULL, level, \
1120  "%sWARNING: library configuration mismatch\n", \
1121  indent); \
1122  warned_cfg = 1; \
1123  } \
1124  av_log(NULL, level, "%s%-11s configuration: %s\n", \
1125  indent, #libname, cfg); \
1126  } \
1127  } \
1128  } \
1129 
1130 static void print_all_libs_info(int flags, int level)
1131 {
1132  PRINT_LIB_INFO(avutil, AVUTIL, flags, level);
1133  PRINT_LIB_INFO(avcodec, AVCODEC, flags, level);
1134  PRINT_LIB_INFO(avformat, AVFORMAT, flags, level);
1135  PRINT_LIB_INFO(avdevice, AVDEVICE, flags, level);
1136  PRINT_LIB_INFO(avfilter, AVFILTER, flags, level);
1137  PRINT_LIB_INFO(avresample, AVRESAMPLE, flags, level);
1138  PRINT_LIB_INFO(swscale, SWSCALE, flags, level);
1139  PRINT_LIB_INFO(swresample, SWRESAMPLE, flags, level);
1140  PRINT_LIB_INFO(postproc, POSTPROC, flags, level);
1141 }
1142 
1143 static void print_program_info(int flags, int level)
1144 {
1145  const char *indent = flags & INDENT? " " : "";
1146 
1147  av_log(NULL, level, "%s version " FFMPEG_VERSION, program_name);
1148  if (flags & SHOW_COPYRIGHT)
1149  av_log(NULL, level, " Copyright (c) %d-%d the FFmpeg developers",
1150  program_birth_year, CONFIG_THIS_YEAR);
1151  av_log(NULL, level, "\n");
1152  av_log(NULL, level, "%sbuilt with %s\n", indent, CC_IDENT);
1153 
1154  av_log(NULL, level, "%sconfiguration: " FFMPEG_CONFIGURATION "\n", indent);
1155 }
1156 
1157 static void print_buildconf(int flags, int level)
1158 {
1159  const char *indent = flags & INDENT ? " " : "";
1160  char str[] = { FFMPEG_CONFIGURATION };
1161  char *conflist, *remove_tilde, *splitconf;
1162 
1163  // Change all the ' --' strings to '~--' so that
1164  // they can be identified as tokens.
1165  while ((conflist = strstr(str, " --")) != NULL) {
1166  conflist[0] = '~';
1167  }
1168 
1169  // Compensate for the weirdness this would cause
1170  // when passing 'pkg-config --static'.
1171  while ((remove_tilde = strstr(str, "pkg-config~")) != NULL) {
1172  remove_tilde[sizeof("pkg-config~") - 2] = ' ';
1173  }
1174 
1175  splitconf = strtok(str, "~");
1176  av_log(NULL, level, "\n%sconfiguration:\n", indent);
1177  while (splitconf != NULL) {
1178  av_log(NULL, level, "%s%s%s\n", indent, indent, splitconf);
1179  splitconf = strtok(NULL, "~");
1180  }
1181 }
1182 
1183 void show_banner(int argc, char **argv, const OptionDef *options)
1184 {
1185  int idx = locate_option(argc, argv, options, "version");
1186  if (hide_banner || idx)
1187  return;
1188 
1189  print_program_info (INDENT|SHOW_COPYRIGHT, AV_LOG_INFO);
1190  print_all_libs_info(INDENT|SHOW_CONFIG, AV_LOG_INFO);
1191  print_all_libs_info(INDENT|SHOW_VERSION, AV_LOG_INFO);
1192 }
1193 
1194 int show_version(void *optctx, const char *opt, const char *arg)
1195 {
1196  av_log_set_callback(log_callback_help);
1197  print_program_info (SHOW_COPYRIGHT, AV_LOG_INFO);
1198  print_all_libs_info(SHOW_VERSION, AV_LOG_INFO);
1199 
1200  return 0;
1201 }
1202 
1203 int show_buildconf(void *optctx, const char *opt, const char *arg)
1204 {
1205  av_log_set_callback(log_callback_help);
1206  print_buildconf(INDENT|0, AV_LOG_INFO);
1207 
1208  return 0;
1209 }
1210 
1211 int show_license(void *optctx, const char *opt, const char *arg)
1212 {
1213 #if CONFIG_NONFREE
1214  printf(
1215  "This version of %s has nonfree parts compiled in.\n"
1216  "Therefore it is not legally redistributable.\n",
1217  program_name );
1218 #elif CONFIG_GPLV3
1219  printf(
1220  "%s is free software; you can redistribute it and/or modify\n"
1221  "it under the terms of the GNU General Public License as published by\n"
1222  "the Free Software Foundation; either version 3 of the License, or\n"
1223  "(at your option) any later version.\n"
1224  "\n"
1225  "%s is distributed in the hope that it will be useful,\n"
1226  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1227  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1228  "GNU General Public License for more details.\n"
1229  "\n"
1230  "You should have received a copy of the GNU General Public License\n"
1231  "along with %s. If not, see <http://www.gnu.org/licenses/>.\n",
1232  program_name, program_name, program_name);
1233 #elif CONFIG_GPL
1234  printf(
1235  "%s is free software; you can redistribute it and/or modify\n"
1236  "it under the terms of the GNU General Public License as published by\n"
1237  "the Free Software Foundation; either version 2 of the License, or\n"
1238  "(at your option) any later version.\n"
1239  "\n"
1240  "%s is distributed in the hope that it will be useful,\n"
1241  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1242  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1243  "GNU General Public License for more details.\n"
1244  "\n"
1245  "You should have received a copy of the GNU General Public License\n"
1246  "along with %s; if not, write to the Free Software\n"
1247  "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n",
1248  program_name, program_name, program_name);
1249 #elif CONFIG_LGPLV3
1250  printf(
1251  "%s is free software; you can redistribute it and/or modify\n"
1252  "it under the terms of the GNU Lesser General Public License as published by\n"
1253  "the Free Software Foundation; either version 3 of the License, or\n"
1254  "(at your option) any later version.\n"
1255  "\n"
1256  "%s is distributed in the hope that it will be useful,\n"
1257  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1258  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1259  "GNU Lesser General Public License for more details.\n"
1260  "\n"
1261  "You should have received a copy of the GNU Lesser General Public License\n"
1262  "along with %s. If not, see <http://www.gnu.org/licenses/>.\n",
1263  program_name, program_name, program_name);
1264 #else
1265  printf(
1266  "%s is free software; you can redistribute it and/or\n"
1267  "modify it under the terms of the GNU Lesser General Public\n"
1268  "License as published by the Free Software Foundation; either\n"
1269  "version 2.1 of the License, or (at your option) any later version.\n"
1270  "\n"
1271  "%s is distributed in the hope that it will be useful,\n"
1272  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1273  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n"
1274  "Lesser General Public License for more details.\n"
1275  "\n"
1276  "You should have received a copy of the GNU Lesser General Public\n"
1277  "License along with %s; if not, write to the Free Software\n"
1278  "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n",
1279  program_name, program_name, program_name);
1280 #endif
1281 
1282  return 0;
1283 }
1284 
1285 static int is_device(const AVClass *avclass)
1286 {
1287  if (!avclass)
1288  return 0;
1289  return AV_IS_INPUT_DEVICE(avclass->category) || AV_IS_OUTPUT_DEVICE(avclass->category);
1290 }
1291 
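/* Shared implementation behind -formats, -muxers, -demuxers and -devices:
 * walk the registered (de)muxers in alphabetical order and print one line per
 * name with its demuxing (D) and muxing (E) capabilities. */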
1292 static int show_formats_devices(void *optctx, const char *opt, const char *arg, int device_only, int muxdemuxers)
1293 {
1294  void *ifmt_opaque = NULL;
1295  const AVInputFormat *ifmt = NULL;
1296  void *ofmt_opaque = NULL;
1297  const AVOutputFormat *ofmt = NULL;
1298  const char *last_name;
1299  int is_dev;
1300 
1301  printf("%s\n"
1302  " D. = Demuxing supported\n"
1303  " .E = Muxing supported\n"
1304  " --\n", device_only ? "Devices:" : "File formats:");
1305  last_name = "000";
1306  for (;;) {
1307  int decode = 0;
1308  int encode = 0;
1309  const char *name = NULL;
1310  const char *long_name = NULL;
1311 
1312  if (muxdemuxers !=SHOW_DEMUXERS) {
1313  ofmt_opaque = NULL;
1314  while ((ofmt = av_muxer_iterate(&ofmt_opaque))) {
1315  is_dev = is_device(ofmt->priv_class);
1316  if (!is_dev && device_only)
1317  continue;
1318  if ((!name || strcmp(ofmt->name, name) < 0) &&
1319  strcmp(ofmt->name, last_name) > 0) {
1320  name = ofmt->name;
1321  long_name = ofmt->long_name;
1322  encode = 1;
1323  }
1324  }
1325  }
1326  if (muxdemuxers != SHOW_MUXERS) {
1327  ifmt_opaque = NULL;
1328  while ((ifmt = av_demuxer_iterate(&ifmt_opaque))) {
1329  is_dev = is_device(ifmt->priv_class);
1330  if (!is_dev && device_only)
1331  continue;
1332  if ((!name || strcmp(ifmt->name, name) < 0) &&
1333  strcmp(ifmt->name, last_name) > 0) {
1334  name = ifmt->name;
1335  long_name = ifmt->long_name;
1336  encode = 0;
1337  }
1338  if (name && strcmp(ifmt->name, name) == 0)
1339  decode = 1;
1340  }
1341  }
1342  if (!name)
1343  break;
1344  last_name = name;
1345 
1346  printf(" %s%s %-15s %s\n",
1347  decode ? "D" : " ",
1348  encode ? "E" : " ",
1349  name,
1350  long_name ? long_name:" ");
1351  }
1352  return 0;
1353 }
1354 
1355 int show_formats(void *optctx, const char *opt, const char *arg)
1356 {
1357  return show_formats_devices(optctx, opt, arg, 0, SHOW_DEFAULT);
1358 }
1359 
1360 int show_muxers(void *optctx, const char *opt, const char *arg)
1361 {
1362  return show_formats_devices(optctx, opt, arg, 0, SHOW_MUXERS);
1363 }
1364 
1365 int show_demuxers(void *optctx, const char *opt, const char *arg)
1366 {
1367  return show_formats_devices(optctx, opt, arg, 0, SHOW_DEMUXERS);
1368 }
1369 
1370 int show_devices(void *optctx, const char *opt, const char *arg)
1371 {
1372  return show_formats_devices(optctx, opt, arg, 1, SHOW_DEFAULT);
1373 }
1374 
1375 #define PRINT_CODEC_SUPPORTED(codec, field, type, list_name, term, get_name) \
1376  if (codec->field) { \
1377  const type *p = codec->field; \
1378  \
1379  printf(" Supported " list_name ":"); \
1380  while (*p != term) { \
1381  get_name(*p); \
1382  printf(" %s", name); \
1383  p++; \
1384  } \
1385  printf("\n"); \
1386  } \
1387 
1388 static void print_codec(const AVCodec *c)
1389 {
1390  int encoder = av_codec_is_encoder(c);
1391 
1392  printf("%s %s [%s]:\n", encoder ? "Encoder" : "Decoder", c->name,
1393  c->long_name ? c->long_name : "");
1394 
1395  printf(" General capabilities: ");
1396  if (c->capabilities & AV_CODEC_CAP_DRAW_HORIZ_BAND)
1397  printf("horizband ");
1398  if (c->capabilities & AV_CODEC_CAP_DR1)
1399  printf("dr1 ");
1400  if (c->capabilities & AV_CODEC_CAP_TRUNCATED)
1401  printf("trunc ");
1402  if (c->capabilities & AV_CODEC_CAP_DELAY)
1403  printf("delay ");
1404  if (c->capabilities & AV_CODEC_CAP_SMALL_LAST_FRAME)
1405  printf("small ");
1406  if (c->capabilities & AV_CODEC_CAP_SUBFRAMES)
1407  printf("subframes ");
1408  if (c->capabilities & AV_CODEC_CAP_EXPERIMENTAL)
1409  printf("exp ");
1410  if (c->capabilities & AV_CODEC_CAP_CHANNEL_CONF)
1411  printf("chconf ");
1412  if (c->capabilities & AV_CODEC_CAP_PARAM_CHANGE)
1413  printf("paramchange ");
1414  if (c->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
1415  printf("variable ");
1416  if (c->capabilities & (AV_CODEC_CAP_FRAME_THREADS |
1419  printf("threads ");
1420  if (c->capabilities & AV_CODEC_CAP_AVOID_PROBING)
1421  printf("avoidprobe ");
1422  if (c->capabilities & AV_CODEC_CAP_HARDWARE)
1423  printf("hardware ");
1424  if (c->capabilities & AV_CODEC_CAP_HYBRID)
1425  printf("hybrid ");
1426  if (!c->capabilities)
1427  printf("none");
1428  printf("\n");
1429 
1430  if (c->type == AVMEDIA_TYPE_VIDEO ||
1431  c->type == AVMEDIA_TYPE_AUDIO) {
1432  printf(" Threading capabilities: ");
1433  switch (c->capabilities & (AV_CODEC_CAP_FRAME_THREADS |
1434  AV_CODEC_CAP_SLICE_THREADS |
1435  AV_CODEC_CAP_OTHER_THREADS)) {
1436  case AV_CODEC_CAP_FRAME_THREADS |
1437  AV_CODEC_CAP_SLICE_THREADS: printf("frame and slice"); break;
1438  case AV_CODEC_CAP_FRAME_THREADS: printf("frame"); break;
1439  case AV_CODEC_CAP_SLICE_THREADS: printf("slice"); break;
1440  case AV_CODEC_CAP_OTHER_THREADS: printf("other"); break;
1441  default: printf("none"); break;
1442  }
1443  printf("\n");
1444  }
1445 
1446  if (avcodec_get_hw_config(c, 0)) {
1447  printf(" Supported hardware devices: ");
1448  for (int i = 0;; i++) {
1449  const AVCodecHWConfig *config = avcodec_get_hw_config(c, i);
1450  if (!config)
1451  break;
1452  printf("%s ", av_hwdevice_get_type_name(config->device_type));
1453  }
1454  printf("\n");
1455  }
1456 
1457  if (c->supported_framerates) {
1458  const AVRational *fps = c->supported_framerates;
1459 
1460  printf(" Supported framerates:");
1461  while (fps->num) {
1462  printf(" %d/%d", fps->num, fps->den);
1463  fps++;
1464  }
1465  printf("\n");
1466  }
1467  PRINT_CODEC_SUPPORTED(c, pix_fmts, enum AVPixelFormat, "pixel formats",
1468  AV_PIX_FMT_NONE, GET_PIX_FMT_NAME);
1469  PRINT_CODEC_SUPPORTED(c, supported_samplerates, int, "sample rates", 0,
1470  GET_SAMPLE_RATE_NAME);
1471  PRINT_CODEC_SUPPORTED(c, sample_fmts, enum AVSampleFormat, "sample formats",
1472  AV_SAMPLE_FMT_NONE, GET_SAMPLE_FMT_NAME);
1473  PRINT_CODEC_SUPPORTED(c, channel_layouts, uint64_t, "channel layouts",
1474  0, GET_CH_LAYOUT_DESC);
1475 
1476  if (c->priv_class) {
1477  show_help_children(c->priv_class,
1478  AV_OPT_FLAG_ENCODING_PARAM |
1479  AV_OPT_FLAG_DECODING_PARAM);
1480  }
1481 }
1482 
1483 static char get_media_type_char(enum AVMediaType type)
1484 {
1485  switch (type) {
1486  case AVMEDIA_TYPE_VIDEO: return 'V';
1487  case AVMEDIA_TYPE_AUDIO: return 'A';
1488  case AVMEDIA_TYPE_DATA: return 'D';
1489  case AVMEDIA_TYPE_SUBTITLE: return 'S';
1490  case AVMEDIA_TYPE_ATTACHMENT:return 'T';
1491  default: return '?';
1492  }
1493 }
1494 
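/* Return the next registered encoder or decoder implementing the given codec
 * ID, continuing the iteration stored in *iter, or NULL when exhausted. */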
1495 static const AVCodec *next_codec_for_id(enum AVCodecID id, void **iter,
1496  int encoder)
1497 {
1498  const AVCodec *c;
1499  while ((c = av_codec_iterate(iter))) {
1500  if (c->id == id &&
1501  (encoder ? av_codec_is_encoder(c) : av_codec_is_decoder(c)))
1502  return c;
1503  }
1504  return NULL;
1505 }
1506 
1507 static int compare_codec_desc(const void *a, const void *b)
1508 {
1509  const AVCodecDescriptor * const *da = a;
1510  const AVCodecDescriptor * const *db = b;
1511 
1512  return (*da)->type != (*db)->type ? FFDIFFSIGN((*da)->type, (*db)->type) :
1513  strcmp((*da)->name, (*db)->name);
1514 }
1515 
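/* Collect all codec descriptors into an array sorted by media type and name
 * (see compare_codec_desc above); the caller owns and frees the array. */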
1516 static unsigned get_codecs_sorted(const AVCodecDescriptor ***rcodecs)
1517 {
1518  const AVCodecDescriptor *desc = NULL;
1519  const AVCodecDescriptor **codecs;
1520  unsigned nb_codecs = 0, i = 0;
1521 
1522  while ((desc = avcodec_descriptor_next(desc)))
1523  nb_codecs++;
1524  if (!(codecs = av_calloc(nb_codecs, sizeof(*codecs)))) {
1525  av_log(NULL, AV_LOG_ERROR, "Out of memory\n");
1526  exit_program(1);
1527  }
1528  desc = NULL;
1529  while ((desc = avcodec_descriptor_next(desc)))
1530  codecs[i++] = desc;
1531  av_assert0(i == nb_codecs);
1532  qsort(codecs, nb_codecs, sizeof(*codecs), compare_codec_desc);
1533  *rcodecs = codecs;
1534  return nb_codecs;
1535 }
1536 
1537 static void print_codecs_for_id(enum AVCodecID id, int encoder)
1538 {
1539  void *iter = NULL;
1540  const AVCodec *codec;
1541 
1542  printf(" (%s: ", encoder ? "encoders" : "decoders");
1543 
1544  while ((codec = next_codec_for_id(id, &iter, encoder)))
1545  printf("%s ", codec->name);
1546 
1547  printf(")");
1548 }
1549 
1550 int show_codecs(void *optctx, const char *opt, const char *arg)
1551 {
1552  const AVCodecDescriptor **codecs;
1553  unsigned i, nb_codecs = get_codecs_sorted(&codecs);
1554 
1555  printf("Codecs:\n"
1556  " D..... = Decoding supported\n"
1557  " .E.... = Encoding supported\n"
1558  " ..V... = Video codec\n"
1559  " ..A... = Audio codec\n"
1560  " ..S... = Subtitle codec\n"
1561  " ...I.. = Intra frame-only codec\n"
1562  " ....L. = Lossy compression\n"
1563  " .....S = Lossless compression\n"
1564  " -------\n");
1565  for (i = 0; i < nb_codecs; i++) {
1566  const AVCodecDescriptor *desc = codecs[i];
1567  const AVCodec *codec;
1568  void *iter = NULL;
1569 
1570  if (strstr(desc->name, "_deprecated"))
1571  continue;
1572 
1573  printf(" ");
1574  printf(avcodec_find_decoder(desc->id) ? "D" : ".");
1575  printf(avcodec_find_encoder(desc->id) ? "E" : ".");
1576 
1577  printf("%c", get_media_type_char(desc->type));
1578  printf((desc->props & AV_CODEC_PROP_INTRA_ONLY) ? "I" : ".");
1579  printf((desc->props & AV_CODEC_PROP_LOSSY) ? "L" : ".");
1580  printf((desc->props & AV_CODEC_PROP_LOSSLESS) ? "S" : ".");
1581 
1582  printf(" %-20s %s", desc->name, desc->long_name ? desc->long_name : "");
1583 
1584  /* print decoders/encoders when there's more than one or their
1585  * names are different from codec name */
1586  while ((codec = next_codec_for_id(desc->id, &iter, 0))) {
1587  if (strcmp(codec->name, desc->name)) {
1588  print_codecs_for_id(desc->id, 0);
1589  break;
1590  }
1591  }
1592  iter = NULL;
1593  while ((codec = next_codec_for_id(desc->id, &iter, 1))) {
1594  if (strcmp(codec->name, desc->name)) {
1595  print_codecs_for_id(desc->id, 1);
1596  break;
1597  }
1598  }
1599 
1600  printf("\n");
1601  }
1602  av_free(codecs);
1603  return 0;
1604 }
1605 
1606 static void print_codecs(int encoder)
1607 {
1608  const AVCodecDescriptor **codecs;
1609  unsigned i, nb_codecs = get_codecs_sorted(&codecs);
1610 
1611  printf("%s:\n"
1612  " V..... = Video\n"
1613  " A..... = Audio\n"
1614  " S..... = Subtitle\n"
1615  " .F.... = Frame-level multithreading\n"
1616  " ..S... = Slice-level multithreading\n"
1617  " ...X.. = Codec is experimental\n"
1618  " ....B. = Supports draw_horiz_band\n"
1619  " .....D = Supports direct rendering method 1\n"
1620  " ------\n",
1621  encoder ? "Encoders" : "Decoders");
1622  for (i = 0; i < nb_codecs; i++) {
1623  const AVCodecDescriptor *desc = codecs[i];
1624  const AVCodec *codec;
1625  void *iter = NULL;
1626 
1627  while ((codec = next_codec_for_id(desc->id, &iter, encoder))) {
1628  printf(" %c", get_media_type_char(desc->type));
1629  printf((codec->capabilities & AV_CODEC_CAP_FRAME_THREADS) ? "F" : ".");
1630  printf((codec->capabilities & AV_CODEC_CAP_SLICE_THREADS) ? "S" : ".");
1631  printf((codec->capabilities & AV_CODEC_CAP_EXPERIMENTAL) ? "X" : ".");
1632  printf((codec->capabilities & AV_CODEC_CAP_DRAW_HORIZ_BAND)?"B" : ".");
1633  printf((codec->capabilities & AV_CODEC_CAP_DR1) ? "D" : ".");
1634 
1635  printf(" %-20s %s", codec->name, codec->long_name ? codec->long_name : "");
1636  if (strcmp(codec->name, desc->name))
1637  printf(" (codec %s)", desc->name);
1638 
1639  printf("\n");
1640  }
1641  }
1642  av_free(codecs);
1643 }
1644 
1645 int show_decoders(void *optctx, const char *opt, const char *arg)
1646 {
1647  print_codecs(0);
1648  return 0;
1649 }
1650 
1651 int show_encoders(void *optctx, const char *opt, const char *arg)
1652 {
1653  print_codecs(1);
1654  return 0;
1655 }
1656 
1657 int show_bsfs(void *optctx, const char *opt, const char *arg)
1658 {
1659  const AVBitStreamFilter *bsf = NULL;
1660  void *opaque = NULL;
1661 
1662  printf("Bitstream filters:\n");
1663  while ((bsf = av_bsf_iterate(&opaque)))
1664  printf("%s\n", bsf->name);
1665  printf("\n");
1666  return 0;
1667 }
1668 
1669 int show_protocols(void *optctx, const char *opt, const char *arg)
1670 {
1671  void *opaque = NULL;
1672  const char *name;
1673 
1674  printf("Supported file protocols:\n"
1675  "Input:\n");
1676  while ((name = avio_enum_protocols(&opaque, 0)))
1677  printf(" %s\n", name);
1678  printf("Output:\n");
1679  while ((name = avio_enum_protocols(&opaque, 1)))
1680  printf(" %s\n", name);
1681  return 0;
1682 }
1683 
1684 int show_filters(void *optctx, const char *opt, const char *arg)
1685 {
1686 #if CONFIG_AVFILTER
1687  const AVFilter *filter = NULL;
1688  char descr[64], *descr_cur;
1689  void *opaque = NULL;
1690  int i, j;
1691  const AVFilterPad *pad;
1692 
1693  printf("Filters:\n"
1694  " T.. = Timeline support\n"
1695  " .S. = Slice threading\n"
1696  " ..C = Command support\n"
1697  " A = Audio input/output\n"
1698  " V = Video input/output\n"
1699  " N = Dynamic number and/or type of input/output\n"
1700  " | = Source or sink filter\n");
1701  while ((filter = av_filter_iterate(&opaque))) {
1702  descr_cur = descr;
1703  for (i = 0; i < 2; i++) {
1704  if (i) {
1705  *(descr_cur++) = '-';
1706  *(descr_cur++) = '>';
1707  }
1708  pad = i ? filter->outputs : filter->inputs;
1709  for (j = 0; pad && avfilter_pad_get_name(pad, j); j++) {
1710  if (descr_cur >= descr + sizeof(descr) - 4)
1711  break;
1712  *(descr_cur++) = get_media_type_char(avfilter_pad_get_type(pad, j));
1713  }
1714  if (!j)
1715  *(descr_cur++) = ((!i && (filter->flags & AVFILTER_FLAG_DYNAMIC_INPUTS)) ||
1716  ( i && (filter->flags & AVFILTER_FLAG_DYNAMIC_OUTPUTS))) ? 'N' : '|';
1717  }
1718  *descr_cur = 0;
1719  printf(" %c%c%c %-17s %-10s %s\n",
1720  filter->flags & AVFILTER_FLAG_SUPPORT_TIMELINE ? 'T' : '.',
1721  filter->flags & AVFILTER_FLAG_SLICE_THREADS ? 'S' : '.',
1722  filter->process_command ? 'C' : '.',
1723  filter->name, descr, filter->description);
1724  }
1725 #else
1726  printf("No filters available: libavfilter disabled\n");
1727 #endif
1728  return 0;
1729 }
1730 
1731 int show_colors(void *optctx, const char *opt, const char *arg)
1732 {
1733  const char *name;
1734  const uint8_t *rgb;
1735  int i;
1736 
1737  printf("%-32s #RRGGBB\n", "name");
1738 
1739  for (i = 0; name = av_get_known_color_name(i, &rgb); i++)
1740  printf("%-32s #%02x%02x%02x\n", name, rgb[0], rgb[1], rgb[2]);
1741 
1742  return 0;
1743 }
1744 
1745 int show_pix_fmts(void *optctx, const char *opt, const char *arg)
1746 {
1747  const AVPixFmtDescriptor *pix_desc = NULL;
1748 
1749  printf("Pixel formats:\n"
1750  "I.... = Supported Input format for conversion\n"
1751  ".O... = Supported Output format for conversion\n"
1752  "..H.. = Hardware accelerated format\n"
1753  "...P. = Paletted format\n"
1754  "....B = Bitstream format\n"
1755  "FLAGS NAME NB_COMPONENTS BITS_PER_PIXEL\n"
1756  "-----\n");
1757 
1758 #if !CONFIG_SWSCALE
1759 # define sws_isSupportedInput(x) 0
1760 # define sws_isSupportedOutput(x) 0
1761 #endif
1762 
1763  while ((pix_desc = av_pix_fmt_desc_next(pix_desc))) {
1764  enum AVPixelFormat pix_fmt = av_pix_fmt_desc_get_id(pix_desc);
1765  printf("%c%c%c%c%c %-16s %d %2d\n",
1766  sws_isSupportedInput (pix_fmt) ? 'I' : '.',
1767  sws_isSupportedOutput(pix_fmt) ? 'O' : '.',
1768  pix_desc->flags & AV_PIX_FMT_FLAG_HWACCEL ? 'H' : '.',
1769  pix_desc->flags & AV_PIX_FMT_FLAG_PAL ? 'P' : '.',
1770  pix_desc->flags & AV_PIX_FMT_FLAG_BITSTREAM ? 'B' : '.',
1771  pix_desc->name,
1772  pix_desc->nb_components,
1773  av_get_bits_per_pixel(pix_desc));
1774  }
1775  return 0;
1776 }
1777 
1778 int show_layouts(void *optctx, const char *opt, const char *arg)
1779 {
1780  int i = 0;
1781  uint64_t layout, j;
1782  const char *name, *descr;
1783 
1784  printf("Individual channels:\n"
1785  "NAME DESCRIPTION\n");
1786  for (i = 0; i < 63; i++) {
1787  name = av_get_channel_name((uint64_t)1 << i);
1788  if (!name)
1789  continue;
1790  descr = av_get_channel_description((uint64_t)1 << i);
1791  printf("%-14s %s\n", name, descr);
1792  }
1793  printf("\nStandard channel layouts:\n"
1794  "NAME DECOMPOSITION\n");
1795  for (i = 0; !av_get_standard_channel_layout(i, &layout, &name); i++) {
1796  if (name) {
1797  printf("%-14s ", name);
1798  for (j = 1; j; j <<= 1)
1799  if ((layout & j))
1800  printf("%s%s", (layout & (j - 1)) ? "+" : "", av_get_channel_name(j));
1801  printf("\n");
1802  }
1803  }
1804  return 0;
1805 }
1806 
1807 int show_sample_fmts(void *optctx, const char *opt, const char *arg)
1808 {
1809  int i;
1810  char fmt_str[128];
1811  for (i = -1; i < AV_SAMPLE_FMT_NB; i++)
1812  printf("%s\n", av_get_sample_fmt_string(fmt_str, sizeof(fmt_str), i));
1813  return 0;
1814 }
1815 
1816 static void show_help_codec(const char *name, int encoder)
1817 {
1818  const AVCodecDescriptor *desc;
1819  const AVCodec *codec;
1820 
1821  if (!name) {
1822  av_log(NULL, AV_LOG_ERROR, "No codec name specified.\n");
1823  return;
1824  }
1825 
1826  codec = encoder ? avcodec_find_encoder_by_name(name) :
1827  avcodec_find_decoder_by_name(name);
1828 
1829  if (codec)
1830  print_codec(codec);
1831  else if ((desc = avcodec_descriptor_get_by_name(name))) {
1832  void *iter = NULL;
1833  int printed = 0;
1834 
1835  while ((codec = next_codec_for_id(desc->id, &iter, encoder))) {
1836  printed = 1;
1837  print_codec(codec);
1838  }
1839 
1840  if (!printed) {
1841  av_log(NULL, AV_LOG_ERROR, "Codec '%s' is known to FFmpeg, "
1842  "but no %s for it are available. FFmpeg might need to be "
1843  "recompiled with additional external libraries.\n",
1844  name, encoder ? "encoders" : "decoders");
1845  }
1846  } else {
1847  av_log(NULL, AV_LOG_ERROR, "Codec '%s' is not recognized by FFmpeg.\n",
1848  name);
1849  }
1850 }
1851 
1852 static void show_help_demuxer(const char *name)
1853 {
1854  const AVInputFormat *fmt = av_find_input_format(name);
1855 
1856  if (!fmt) {
1857  av_log(NULL, AV_LOG_ERROR, "Unknown format '%s'.\n", name);
1858  return;
1859  }
1860 
1861  printf("Demuxer %s [%s]:\n", fmt->name, fmt->long_name);
1862 
1863  if (fmt->extensions)
1864  printf(" Common extensions: %s.\n", fmt->extensions);
1865 
1866  if (fmt->priv_class)
1867  show_help_children(fmt->priv_class, AV_OPT_FLAG_DECODING_PARAM);
1868 }
1869 
1870 static void show_help_protocol(const char *name)
1871 {
1872  const AVClass *proto_class;
1873 
1874  if (!name) {
1875  av_log(NULL, AV_LOG_ERROR, "No protocol name specified.\n");
1876  return;
1877  }
1878 
1879  proto_class = avio_protocol_get_class(name);
1880  if (!proto_class) {
1881  av_log(NULL, AV_LOG_ERROR, "Unknown protocol '%s'.\n", name);
1882  return;
1883  }
1884 
1885  show_help_children(proto_class, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_ENCODING_PARAM);
1886 }
1887 
1888 static void show_help_muxer(const char *name)
1889 {
1890  const AVCodecDescriptor *desc;
1891  const AVOutputFormat *fmt = av_guess_format(name, NULL, NULL);
1892 
1893  if (!fmt) {
1894  av_log(NULL, AV_LOG_ERROR, "Unknown format '%s'.\n", name);
1895  return;
1896  }
1897 
1898  printf("Muxer %s [%s]:\n", fmt->name, fmt->long_name);
1899 
1900  if (fmt->extensions)
1901  printf(" Common extensions: %s.\n", fmt->extensions);
1902  if (fmt->mime_type)
1903  printf(" Mime type: %s.\n", fmt->mime_type);
1904  if (fmt->video_codec != AV_CODEC_ID_NONE &&
1906  printf(" Default video codec: %s.\n", desc->name);
1907  }
1908  if (fmt->audio_codec != AV_CODEC_ID_NONE &&
1910  printf(" Default audio codec: %s.\n", desc->name);
1911  }
1912  if (fmt->subtitle_codec != AV_CODEC_ID_NONE &&
1914  printf(" Default subtitle codec: %s.\n", desc->name);
1915  }
1916 
1917  if (fmt->priv_class)
1918  show_help_children(fmt->priv_class, AV_OPT_FLAG_ENCODING_PARAM);
1919 }
1920 
1921 #if CONFIG_AVFILTER
1922 static void show_help_filter(const char *name)
1923 {
1924 #if CONFIG_AVFILTER
1925  const AVFilter *f = avfilter_get_by_name(name);
1926  int i, count;
1927 
1928  if (!name) {
1929  av_log(NULL, AV_LOG_ERROR, "No filter name specified.\n");
1930  return;
1931  } else if (!f) {
1932  av_log(NULL, AV_LOG_ERROR, "Unknown filter '%s'.\n", name);
1933  return;
1934  }
1935 
1936  printf("Filter %s\n", f->name);
1937  if (f->description)
1938  printf(" %s\n", f->description);
1939 
1940  if (f->flags & AVFILTER_FLAG_SLICE_THREADS)
1941  printf(" slice threading supported\n");
1942 
1943  printf(" Inputs:\n");
1944  count = avfilter_pad_count(f->inputs);
1945  for (i = 0; i < count; i++) {
1946  printf(" #%d: %s (%s)\n", i, avfilter_pad_get_name(f->inputs, i),
1947  media_type_string(avfilter_pad_get_type(f->inputs, i)));
1948  }
1949  if (f->flags & AVFILTER_FLAG_DYNAMIC_INPUTS)
1950  printf(" dynamic (depending on the options)\n");
1951  else if (!count)
1952  printf(" none (source filter)\n");
1953 
1954  printf(" Outputs:\n");
1955  count = avfilter_pad_count(f->outputs);
1956  for (i = 0; i < count; i++) {
1957  printf(" #%d: %s (%s)\n", i, avfilter_pad_get_name(f->outputs, i),
1958  media_type_string(avfilter_pad_get_type(f->outputs, i)));
1959  }
1960  if (f->flags & AVFILTER_FLAG_DYNAMIC_OUTPUTS)
1961  printf(" dynamic (depending on the options)\n");
1962  else if (!count)
1963  printf(" none (sink filter)\n");
1964 
1965  if (f->priv_class)
1966  show_help_children(f->priv_class, AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM |
1967  AV_OPT_FLAG_AUDIO_PARAM);
1968  if (f->flags & AVFILTER_FLAG_SUPPORT_TIMELINE)
1969  printf("This filter has support for timeline through the 'enable' option.\n");
1970 #else
1971  av_log(NULL, AV_LOG_ERROR, "Build without libavfilter; "
1972  "cannot satisfy the request\n");
1973 #endif
1974 }
1975 #endif
1976 
1977 static void show_help_bsf(const char *name)
1978 {
1979  const AVBitStreamFilter *bsf = av_bsf_get_by_name(name);
1980 
1981  if (!name) {
1982  av_log(NULL, AV_LOG_ERROR, "No bitstream filter name specified.\n");
1983  return;
1984  } else if (!bsf) {
1985  av_log(NULL, AV_LOG_ERROR, "Unknown bit stream filter '%s'.\n", name);
1986  return;
1987  }
1988 
1989  printf("Bit stream filter %s\n", bsf->name);
1990  PRINT_CODEC_SUPPORTED(bsf, codec_ids, enum AVCodecID, "codecs",
1991  AV_CODEC_ID_NONE, GET_CODEC_NAME);
1992  if (bsf->priv_class)
1993  show_help_children(bsf->priv_class, AV_OPT_FLAG_BSF_PARAM);
1994 }
1995 
1996 int show_help(void *optctx, const char *opt, const char *arg)
1997 {
1998  char *topic, *par;
1999  av_log_set_callback(log_callback_help);
2000 
2001  topic = av_strdup(arg ? arg : "");
2002  if (!topic)
2003  return AVERROR(ENOMEM);
2004  par = strchr(topic, '=');
2005  if (par)
2006  *par++ = 0;
2007 
2008  if (!*topic) {
2009  show_help_default(topic, par);
2010  } else if (!strcmp(topic, "decoder")) {
2011  show_help_codec(par, 0);
2012  } else if (!strcmp(topic, "encoder")) {
2013  show_help_codec(par, 1);
2014  } else if (!strcmp(topic, "demuxer")) {
2015  show_help_demuxer(par);
2016  } else if (!strcmp(topic, "muxer")) {
2017  show_help_muxer(par);
2018  } else if (!strcmp(topic, "protocol")) {
2019  show_help_protocol(par);
2020 #if CONFIG_AVFILTER
2021  } else if (!strcmp(topic, "filter")) {
2022  show_help_filter(par);
2023 #endif
2024  } else if (!strcmp(topic, "bsf")) {
2025  show_help_bsf(par);
2026  } else {
2027  show_help_default(topic, par);
2028  }
2029 
2030  av_freep(&topic);
2031  return 0;
2032 }
2033 
2034 int read_yesno(void)
2035 {
2036  int c = getchar();
2037  int yesno = (av_toupper(c) == 'Y');
2038 
2039  while (c != '\n' && c != EOF)
2040  c = getchar();
2041 
2042  return yesno;
2043 }
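read_yesno() returns 1 only when the first character typed on stdin is 'y' or 'Y', and it consumes the rest of the input line either way. A minimal usage sketch; the prompt text and the surrounding overwrite logic are illustrative, and 'filename' is a hypothetical variable:

/* Illustrative overwrite confirmation using read_yesno(). */
fprintf(stderr, "File '%s' already exists. Overwrite? [y/N] ", filename);
fflush(stderr);
if (!read_yesno()) {
    av_log(NULL, AV_LOG_FATAL, "Not overwriting - exiting\n");
    exit_program(1);
}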
2044 
2045 FILE *get_preset_file(char *filename, size_t filename_size,
2046  const char *preset_name, int is_path,
2047  const char *codec_name)
2048 {
2049  FILE *f = NULL;
2050  int i;
2051  const char *base[3] = { getenv("FFMPEG_DATADIR"),
2052  getenv("HOME"),
2053  FFMPEG_DATADIR, };
2054 
2055  if (is_path) {
2056  av_strlcpy(filename, preset_name, filename_size);
2057  f = fopen(filename, "r");
2058  } else {
2059 #if HAVE_GETMODULEHANDLE && defined(_WIN32)
2060  char datadir[MAX_PATH], *ls;
2061  base[2] = NULL;
2062 
2063  if (GetModuleFileNameA(GetModuleHandleA(NULL), datadir, sizeof(datadir) - 1))
2064  {
2065  for (ls = datadir; ls < datadir + strlen(datadir); ls++)
2066  if (*ls == '\\') *ls = '/';
2067 
2068  if (ls = strrchr(datadir, '/'))
2069  {
2070  *ls = 0;
2071  strncat(datadir, "/ffpresets", sizeof(datadir) - 1 - strlen(datadir));
2072  base[2] = datadir;
2073  }
2074  }
2075 #endif
2076  for (i = 0; i < 3 && !f; i++) {
2077  if (!base[i])
2078  continue;
2079  snprintf(filename, filename_size, "%s%s/%s.ffpreset", base[i],
2080  i != 1 ? "" : "/.ffmpeg", preset_name);
2081  f = fopen(filename, "r");
2082  if (!f && codec_name) {
2083  snprintf(filename, filename_size,
2084  "%s%s/%s-%s.ffpreset",
2085  base[i], i != 1 ? "" : "/.ffmpeg", codec_name,
2086  preset_name);
2087  f = fopen(filename, "r");
2088  }
2089  }
2090  }
2091 
2092  return f;
2093 }
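get_preset_file() probes $FFMPEG_DATADIR, $HOME/.ffmpeg and the build-time data directory (or, on Windows, an ffpresets directory next to the executable) for '<preset>.ffpreset', falling back to '<codec>-<preset>.ffpreset' when a codec name is supplied. A hedged usage sketch with hypothetical argument values:

/* Look for a "fast" preset specific to libx264; 'path' receives the file that was found. */
char path[1024];
FILE *f = get_preset_file(path, sizeof(path), "fast", 0, "libx264");
if (f) {
    av_log(NULL, AV_LOG_INFO, "Using preset file '%s'\n", path);
    /* ... read key=value lines from f ... */
    fclose(f);
} else {
    av_log(NULL, AV_LOG_ERROR, "Preset 'fast' not found.\n");
}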
2094 
2095 int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
2096 {
2097  int ret = avformat_match_stream_specifier(s, st, spec);
2098  if (ret < 0)
2099  av_log(s, AV_LOG_ERROR, "Invalid stream specifier: %s.\n", spec);
2100  return ret;
2101 }
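check_stream_specifier() returns a positive value when the stream matches the specifier, 0 when it does not, and a negative error code (after logging) when the specifier itself is invalid. A small sketch, assuming an already-opened AVFormatContext named fmt_ctx (a hypothetical variable):

/* Report which streams match the audio specifier "a". */
for (unsigned i = 0; i < fmt_ctx->nb_streams; i++) {
    int ret = check_stream_specifier(fmt_ctx, fmt_ctx->streams[i], "a");
    if (ret < 0)
        exit_program(1);               /* invalid specifier */
    if (ret > 0)
        av_log(NULL, AV_LOG_INFO, "Stream #%u is audio.\n", i);
}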
2102 
2103 AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id,
2104  AVFormatContext *s, AVStream *st, const AVCodec *codec)
2105 {
2106  AVDictionary *ret = NULL;
2107  AVDictionaryEntry *t = NULL;
2108  int flags = s->oformat ? AV_OPT_FLAG_ENCODING_PARAM
2109  : AV_OPT_FLAG_DECODING_PARAM;
2110  char prefix = 0;
2111  const AVClass *cc = avcodec_get_class();
2112 
2113  if (!codec)
2114  codec = s->oformat ? avcodec_find_encoder(codec_id)
2115  : avcodec_find_decoder(codec_id);
2116 
2117  switch (st->codecpar->codec_type) {
2118  case AVMEDIA_TYPE_VIDEO:
2119  prefix = 'v';
2120  flags |= AV_OPT_FLAG_VIDEO_PARAM;
2121  break;
2122  case AVMEDIA_TYPE_AUDIO:
2123  prefix = 'a';
2124  flags |= AV_OPT_FLAG_AUDIO_PARAM;
2125  break;
2126  case AVMEDIA_TYPE_SUBTITLE:
2127  prefix = 's';
2128  flags |= AV_OPT_FLAG_SUBTITLE_PARAM;
2129  break;
2130  }
2131 
2132  while (t = av_dict_get(opts, "", t, AV_DICT_IGNORE_SUFFIX)) {
2133  const AVClass *priv_class;
2134  char *p = strchr(t->key, ':');
2135 
2136  /* check stream specification in opt name */
2137  if (p)
2138  switch (check_stream_specifier(s, st, p + 1)) {
2139  case 1: *p = 0; break;
2140  case 0: continue;
2141  default: exit_program(1);
2142  }
2143 
2144  if (av_opt_find(&cc, t->key, NULL, flags, AV_OPT_SEARCH_FAKE_OBJ) ||
2145  !codec ||
2146  ((priv_class = codec->priv_class) &&
2147  av_opt_find(&priv_class, t->key, NULL, flags,
2148  AV_OPT_SEARCH_FAKE_OBJ)))
2149  av_dict_set(&ret, t->key, t->value, 0);
2150  else if (t->key[0] == prefix &&
2151  av_opt_find(&cc, t->key + 1, NULL, flags,
2152  AV_OPT_SEARCH_FAKE_OBJ))
2153  av_dict_set(&ret, t->key + 1, t->value, 0);
2154 
2155  if (p)
2156  *p = ':';
2157  }
2158  return ret;
2159 }
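filter_codec_opts() copies, from a global option dictionary, only the entries that apply to the given stream and codec, resolving the 'v'/'a'/'s' media-type prefixes and ':stream_specifier' suffixes on option names. A sketch of the typical call pattern, assuming fmt_ctx, st and a global codec_opts dictionary already exist (hypothetical names):

/* Open a decoder for stream 'st' using only the options that apply to it. */
const AVCodec *dec = avcodec_find_decoder(st->codecpar->codec_id);
AVDictionary *opts = filter_codec_opts(codec_opts, st->codecpar->codec_id,
                                       fmt_ctx, st, dec);
AVCodecContext *dec_ctx = avcodec_alloc_context3(dec);
if (!dec_ctx)
    exit_program(1);
avcodec_parameters_to_context(dec_ctx, st->codecpar);
if (avcodec_open2(dec_ctx, dec, &opts) < 0)
    exit_program(1);
av_dict_free(&opts);   /* entries consumed by the codec have been removed */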
2160 
2161 AVDictionary **setup_find_stream_info_opts(AVFormatContext *s,
2162  AVDictionary *codec_opts)
2163 {
2164  int i;
2165  AVDictionary **opts;
2166 
2167  if (!s->nb_streams)
2168  return NULL;
2169  opts = av_mallocz_array(s->nb_streams, sizeof(*opts));
2170  if (!opts) {
2171  av_log(NULL, AV_LOG_ERROR,
2172  "Could not alloc memory for stream options.\n");
2173  return NULL;
2174  }
2175  for (i = 0; i < s->nb_streams; i++)
2176  opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codecpar->codec_id,
2177  s, s->streams[i], NULL);
2178  return opts;
2179 }
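setup_find_stream_info_opts() builds one filtered dictionary per stream, in the form expected by avformat_find_stream_info(). A sketch of the intended call pattern; fmt_ctx and codec_opts are assumed to exist, and the function returns NULL when the file has no streams or allocation fails:

AVDictionary **opts = setup_find_stream_info_opts(fmt_ctx, codec_opts);
if (avformat_find_stream_info(fmt_ctx, opts) < 0)
    av_log(NULL, AV_LOG_WARNING, "Could not find stream information\n");
if (opts) {
    for (unsigned i = 0; i < fmt_ctx->nb_streams; i++)
        av_dict_free(&opts[i]);
    av_freep(&opts);
}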
2180 
2181 void *grow_array(void *array, int elem_size, int *size, int new_size)
2182 {
2183  if (new_size >= INT_MAX / elem_size) {
2184  av_log(NULL, AV_LOG_ERROR, "Array too big.\n");
2185  exit_program(1);
2186  }
2187  if (*size < new_size) {
2188  uint8_t *tmp = av_realloc_array(array, new_size, elem_size);
2189  if (!tmp) {
2190  av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n");
2191  exit_program(1);
2192  }
2193  memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size);
2194  *size = new_size;
2195  return tmp;
2196  }
2197  return array;
2198 }
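grow_array() reallocates an array to new_size elements, zero-fills the newly added tail, and aborts the program on overflow or allocation failure; the fftools normally reach it through the GROW_ARRAY() convenience macro in cmdutils.h. A minimal direct-use sketch with hypothetical variables:

/* Append one element to a dynamically grown int array. */
int *items    = NULL;
int  nb_items = 0;

items = grow_array(items, sizeof(*items), &nb_items, nb_items + 1);
items[nb_items - 1] = 42;   /* nb_items was updated to the new size */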
2199 
2200 double get_rotation(AVStream *st)
2201 {
2202  uint8_t* displaymatrix = av_stream_get_side_data(st,
2203  AV_PKT_DATA_DISPLAYMATRIX, NULL);
2204  double theta = 0;
2205  if (displaymatrix)
2206  theta = -av_display_rotation_get((int32_t*) displaymatrix);
2207 
2208  theta -= 360*floor(theta/360 + 0.9/360);
2209 
2210  if (fabs(theta - 90*round(theta/90)) > 2)
2211  av_log(NULL, AV_LOG_WARNING, "Odd rotation angle.\n"
2212  "If you want to help, upload a sample "
2213  "of this file to https://streams.videolan.org/upload/ "
2214  "and contact the ffmpeg-devel mailing list. (ffmpeg-devel@ffmpeg.org)");
2215 
2216  return theta;
2217 }
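get_rotation() reads the AV_PKT_DATA_DISPLAYMATRIX side data of a stream and returns its rotation angle normalized to roughly [0, 360), warning when the angle is not close to a multiple of 90 degrees. A sketch of how a caller might react to the result; the thresholds and messages are illustrative, and 'st' is a hypothetical AVStream* from an opened input:

double theta = get_rotation(st);

if (fabs(theta - 90) < 1.0)
    av_log(NULL, AV_LOG_INFO, "Needs a 90 degree clockwise rotation.\n");
else if (fabs(theta - 180) < 1.0)
    av_log(NULL, AV_LOG_INFO, "Needs a 180 degree rotation.\n");
else if (fabs(theta - 270) < 1.0)
    av_log(NULL, AV_LOG_INFO, "Needs a 90 degree counterclockwise rotation.\n");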
2218 
2219 #if CONFIG_AVDEVICE
2220 static int print_device_sources(AVInputFormat *fmt, AVDictionary *opts)
2221 {
2222  int ret, i;
2223  AVDeviceInfoList *device_list = NULL;
2224 
2225  if (!fmt || !fmt->priv_class || !AV_IS_INPUT_DEVICE(fmt->priv_class->category))
2226  return AVERROR(EINVAL);
2227 
2228  printf("Auto-detected sources for %s:\n", fmt->name);
2229  if (!fmt->get_device_list) {
2230  ret = AVERROR(ENOSYS);
2231  printf("Cannot list sources. Not implemented.\n");
2232  goto fail;
2233  }
2234 
2235  if ((ret = avdevice_list_input_sources(fmt, NULL, opts, &device_list)) < 0) {
2236  printf("Cannot list sources.\n");
2237  goto fail;
2238  }
2239 
2240  for (i = 0; i < device_list->nb_devices; i++) {
2241  printf("%s %s [%s]\n", device_list->default_device == i ? "*" : " ",
2242  device_list->devices[i]->device_name, device_list->devices[i]->device_description);
2243  }
2244 
2245  fail:
2246  avdevice_free_list_devices(&device_list);
2247  return ret;
2248 }
2249 
2250 static int print_device_sinks(AVOutputFormat *fmt, AVDictionary *opts)
2251 {
2252  int ret, i;
2253  AVDeviceInfoList *device_list = NULL;
2254 
2255  if (!fmt || !fmt->priv_class || !AV_IS_OUTPUT_DEVICE(fmt->priv_class->category))
2256  return AVERROR(EINVAL);
2257 
2258  printf("Auto-detected sinks for %s:\n", fmt->name);
2259  if (!fmt->get_device_list) {
2260  ret = AVERROR(ENOSYS);
2261  printf("Cannot list sinks. Not implemented.\n");
2262  goto fail;
2263  }
2264 
2265  if ((ret = avdevice_list_output_sinks(fmt, NULL, opts, &device_list)) < 0) {
2266  printf("Cannot list sinks.\n");
2267  goto fail;
2268  }
2269 
2270  for (i = 0; i < device_list->nb_devices; i++) {
2271  printf("%s %s [%s]\n", device_list->default_device == i ? "*" : " ",
2272  device_list->devices[i]->device_name, device_list->devices[i]->device_description);
2273  }
2274 
2275  fail:
2276  avdevice_free_list_devices(&device_list);
2277  return ret;
2278 }
2279 
2280 static int show_sinks_sources_parse_arg(const char *arg, char **dev, AVDictionary **opts)
2281 {
2282  int ret;
2283  if (arg) {
2284  char *opts_str = NULL;
2285  av_assert0(dev && opts);
2286  *dev = av_strdup(arg);
2287  if (!*dev)
2288  return AVERROR(ENOMEM);
2289  if ((opts_str = strchr(*dev, ','))) {
2290  *(opts_str++) = '\0';
2291  if (opts_str[0] && ((ret = av_dict_parse_string(opts, opts_str, "=", ":", 0)) < 0)) {
2292  av_freep(dev);
2293  return ret;
2294  }
2295  }
2296  } else
2297  printf("\nDevice name is not provided.\n"
2298  "You can pass devicename[,opt1=val1[,opt2=val2...]] as an argument.\n\n");
2299  return 0;
2300 }
2301 
2302 int show_sources(void *optctx, const char *opt, const char *arg)
2303 {
2304  AVInputFormat *fmt = NULL;
2305  char *dev = NULL;
2306  AVDictionary *opts = NULL;
2307  int ret = 0;
2308  int error_level = av_log_get_level();
2309 
2310  av_log_set_level(AV_LOG_WARNING);
2311 
2312  if ((ret = show_sinks_sources_parse_arg(arg, &dev, &opts)) < 0)
2313  goto fail;
2314 
2315  do {
2316  fmt = av_input_audio_device_next(fmt);
2317  if (fmt) {
2318  if (!strcmp(fmt->name, "lavfi"))
2319  continue; //it's pointless to probe lavfi
2320  if (dev && !av_match_name(dev, fmt->name))
2321  continue;
2322  print_device_sources(fmt, opts);
2323  }
2324  } while (fmt);
2325  do {
2326  fmt = av_input_video_device_next(fmt);
2327  if (fmt) {
2328  if (dev && !av_match_name(dev, fmt->name))
2329  continue;
2330  print_device_sources(fmt, opts);
2331  }
2332  } while (fmt);
2333  fail:
2334  av_dict_free(&opts);
2335  av_free(dev);
2336  av_log_set_level(error_level);
2337  return ret;
2338 }
2339 
2340 int show_sinks(void *optctx, const char *opt, const char *arg)
2341 {
2342  AVOutputFormat *fmt = NULL;
2343  char *dev = NULL;
2344  AVDictionary *opts = NULL;
2345  int ret = 0;
2346  int error_level = av_log_get_level();
2347 
2348  av_log_set_level(AV_LOG_WARNING);
2349 
2350  if ((ret = show_sinks_sources_parse_arg(arg, &dev, &opts)) < 0)
2351  goto fail;
2352 
2353  do {
2354  fmt = av_output_audio_device_next(fmt);
2355  if (fmt) {
2356  if (dev && !av_match_name(dev, fmt->name))
2357  continue;
2358  print_device_sinks(fmt, opts);
2359  }
2360  } while (fmt);
2361  do {
2362  fmt = av_output_video_device_next(fmt);
2363  if (fmt) {
2364  if (dev && !av_match_name(dev, fmt->name))
2365  continue;
2366  print_device_sinks(fmt, opts);
2367  }
2368  } while (fmt);
2369  fail:
2370  av_dict_free(&opts);
2371  av_free(dev);
2372  av_log_set_level(error_level);
2373  return ret;
2374 }
2375 
2376 #endif
error
static void error(const char *err)
Definition: target_bsf_fuzzer.c:30
OPT_FLOAT
#define OPT_FLOAT
Definition: cmdutils.h:168
add_bytes
static void add_bytes(HYuvContext *s, uint8_t *dst, uint8_t *src, int w)
Definition: huffyuvdec.c:859
formats
formats
Definition: signature.h:48
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
GET_ARG
#define GET_ARG(arg)
OPT_EXIT
#define OPT_EXIT
Definition: cmdutils.h:171
ff_get_audio_buffer
AVFrame * ff_get_audio_buffer(AVFilterLink *link, int nb_samples)
Request an audio samples buffer with a specific set of permissions.
Definition: audio.c:86
be
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it be(in the first position) for now. Options ------- Then comes the options array. This is what will define the user accessible options. For example
av_force_cpu_flags
void av_force_cpu_flags(int arg)
Disables cpu detection and forces the specified flags.
Definition: cpu.c:67
AVCodec
AVCodec.
Definition: codec.h:197
print_codecs_for_id
static void print_codecs_for_id(enum AVCodecID id, int encoder)
Definition: cmdutils.c:1537
L1
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 L1
Definition: snow.txt:554
OptionGroup::group_def
const OptionGroupDef * group_def
Definition: cmdutils.h:309
show_help_default
void show_help_default(const char *opt, const char *arg)
Per-fftool specific help handler.
Definition: ffmpeg_opt.c:3228
stride
int stride
Definition: mace.c:144
AVMEDIA_TYPE_SUBTITLE
@ AVMEDIA_TYPE_SUBTITLE
Definition: avutil.h:204
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:200
direct
static void direct(const float *in, const FFTComplex *ir, int len, float *out)
Definition: af_afir.c:60
process
static void process(NormalizeContext *s, AVFrame *in, AVFrame *out)
Definition: vf_normalize.c:156
draw_horiz_band
static void draw_horiz_band(AVCodecContext *ctx, const AVFrame *fr, int offset[4], int slice_position, int type, int height)
Definition: api-band-test.c:36
sws_isSupportedOutput
#define sws_isSupportedOutput(x)
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
status
they must not be accessed directly The fifo field contains the frames that are queued in the input for processing by the filter The status_in and status_out fields contains the queued status(EOF or error) of the link
L2
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 L2
Definition: snow.txt:554
level
uint8_t level
Definition: svq3.c:206
program
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C program
Definition: undefined.txt:6
INFINITY
#define INFINITY
Definition: mathematics.h:67
cast
The reader does not expect b to be semantically here and if the code is changed by maybe adding a cast
Definition: undefined.txt:36
avdevice_list_input_sources
int avdevice_list_input_sources(AVInputFormat *device, const char *device_name, AVDictionary *device_options, AVDeviceInfoList **device_list)
List devices.
Definition: avdevice.c:123
sws_isSupportedInput
#define sws_isSupportedInput(x)
AVOutputFormat::extensions
const char * extensions
comma-separated filename extensions
Definition: avformat.h:499
init
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:31
mix
static int mix(int c0, int c1)
Definition: 4xm.c:715
AVOutputFormat::name
const char * name
Definition: avformat.h:491
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
nb_input_files
int nb_input_files
Definition: ffmpeg.c:151
avio_protocol_get_class
const AVClass * avio_protocol_get_class(const char *name)
Get AVClass by names of available protocols.
Definition: protocols.c:129
opt.h
AV_OPT_FLAG_VIDEO_PARAM
#define AV_OPT_FLAG_VIDEO_PARAM
Definition: opt.h:281
GET_SAMPLE_RATE_NAME
#define GET_SAMPLE_RATE_NAME(rate)
Definition: cmdutils.h:631
AVCodecParameters::codec_type
enum AVMediaType codec_type
General type of the encoded data.
Definition: codec_par.h:56
space
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated space
Definition: undefined.txt:4
AV_IS_INPUT_DEVICE
#define AV_IS_INPUT_DEVICE(category)
Definition: log.h:50
avfilter_pad_get_name
const char * avfilter_pad_get_name(const AVFilterPad *pads, int pad_idx)
Get the name of an AVFilterPad.
Definition: avfilter.c:1050
OptionDef::off
size_t off
Definition: cmdutils.h:183
Then
status_out is the status that have been taken into it is final when it is not The typical task of an activate callback is to first check the backward status of output and if relevant forward it to the corresponding input Then
Definition: filter_design.txt:165
transforms
static const struct @72 transforms[18]
AVCodec::long_name
const char * long_name
Descriptive name for the codec, meant to be more human readable than name.
Definition: codec.h:209
libm.h
report_file
static FILE * report_file
Definition: cmdutils.c:72
show_formats
int show_formats(void *optctx, const char *opt, const char *arg)
Print a listing containing all the formats supported by the program (including devices).
Definition: cmdutils.c:1355
av_bprint_finalize
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
Definition: bprint.c:235
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:337
AV_CODEC_PROP_LOSSY
#define AV_CODEC_PROP_LOSSY
Codec supports lossy compression.
Definition: codec_desc.h:78
elements
static const ElemCat * elements[ELEMENT_COUNT]
Definition: signature.h:566
scheduling
===============The purpose of these rules is to ensure that frames flow in the filter graph without getting stuck and accumulating somewhere. Simple filters that output one frame for each input frame should not have to worry about it. There are two design for filters:one using the filter_frame() and request_frame() callbacks and the other using the activate() callback. The design using filter_frame() and request_frame() is legacy, but it is suitable for filters that have a single input and process one frame at a time. New filters with several inputs, that treat several frames at a time or that require a special treatment at EOF should probably use the design using activate(). activate -------- This method is called when something must be done in a filter scheduling
Definition: filter_design.txt:142
opt_report
int opt_report(void *optctx, const char *opt, const char *arg)
Definition: cmdutils.c:1052
MID_STATE
#define MID_STATE
Definition: snow.h:40
av_get_sample_fmt_string
char * av_get_sample_fmt_string(char *buf, int buf_size, enum AVSampleFormat sample_fmt)
Generate a string corresponding to the sample format with sample_fmt, or a header if sample_fmt is ne...
Definition: samplefmt.c:93
av_bprint_init
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
Definition: bprint.c:69
show_layouts
int show_layouts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the standard channel layouts supported by the program.
Definition: cmdutils.c:1778
cb
static double cb(void *priv, double x, double y)
Definition: vf_geq.c:215
nothing
static void nothing(void *foo)
Definition: dshow_capture.h:53
is
The official guide to swscale for confused that is
Definition: swscale.txt:28
playlist
Definition: hls.c:93
u
#define u(width, name, range_min, range_max)
Definition: cbs_h2645.c:264
AV_LOG_QUIET
#define AV_LOG_QUIET
Print no output.
Definition: log.h:176
AVCodec::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: codec.h:223
init_parse_context
static void init_parse_context(OptionParseContext *octx, const OptionGroupDef *groups, int nb_groups)
Definition: cmdutils.c:701
developers
The official guide to swscale for confused developers
Definition: swscale.txt:2
SHOW_DEFAULT
@ SHOW_DEFAULT
Definition: cmdutils.c:77
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1096
log_callback_report
static void log_callback_report(void *ptr, int level, const char *fmt, va_list vl)
Definition: cmdutils.c:101
sample_fmts
static enum AVSampleFormat sample_fmts[]
Definition: adpcmenc.c:925
va_copy.h
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:55
AV_CODEC_CAP_HARDWARE
#define AV_CODEC_CAP_HARDWARE
Codec is backed by a hardware implementation.
Definition: codec.h:157
FFERROR_NOT_READY
return FFERROR_NOT_READY
Definition: filter_design.txt:204
filters
static const struct PPFilter filters[]
Definition: postprocess.c:134
AV_LOG_PANIC
#define AV_LOG_PANIC
Something went really wrong and we will crash now.
Definition: log.h:181
edgedetect
This document is a tutorial initiation for writing simple filters in libavfilter libavfilter is which means that it is highly recommended that you submit your filters to the FFmpeg development mailing list and make sure that they are applied your filters are likely to have a very short lifetime due to more or less regular internal API and a limited and testing changes the pixels in whatever fashion you and outputs the modified frame The most simple way of doing this is to take a similar filter We ll pick edgedetect
Definition: writing_filters.txt:16
AVDeviceInfo::device_name
char * device_name
device name, format depends on device
Definition: avdevice.h:458
sws_dict
AVDictionary * sws_dict
Definition: cmdutils.c:68
show_help_codec
static void show_help_codec(const char *name, int encoder)
Definition: cmdutils.c:1816
get_media_type_char
static char get_media_type_char(enum AVMediaType type)
Definition: cmdutils.c:1483
AVBitStreamFilter::name
const char * name
Definition: bsf.h:99
mv
static const int8_t mv[256][2]
Definition: 4xm.c:78
output
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
Definition: filter_design.txt:225
codecs
static struct codec_string codecs[]
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
show_version
int show_version(void *optctx, const char *opt, const char *arg)
Print the version of the program to stdout.
Definition: cmdutils.c:1194
basis
static int16_t basis[64][64]
Definition: mpegvideo_enc.c:4323
avformat_get_class
const AVClass * avformat_get_class(void)
Get the AVClass for AVFormatContext.
Definition: options.c:246
program_name
const char program_name[]
program name, defined by the program for show_version().
Definition: ffmpeg.c:109
design
Filter design
Definition: filter_design.txt:2
av_unused
#define av_unused
Definition: attributes.h:131
state
static struct @321 state
AVDeviceInfoList::nb_devices
int nb_devices
number of autodetected devices
Definition: avdevice.h:467
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
GET_PIX_FMT_NAME
#define GET_PIX_FMT_NAME(pix_fmt)
Definition: cmdutils.h:622
callbacks
static const OMX_CALLBACKTYPE callbacks
Definition: omx.c:332
AV_CODEC_CAP_TRUNCATED
#define AV_CODEC_CAP_TRUNCATED
Definition: codec.h:53
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:318
tmp
static uint8_t tmp[11]
Definition: aes_ctr.c:27
av_frame_make_writable
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
Definition: frame.c:611
pixdesc.h
step
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But a word about which is also called distortion Distortion can be quantified by almost any quality measurement one chooses the sum of squared differences is used but more complex methods that consider psychovisual effects can be used as well It makes no difference in this discussion First step
Definition: rate_distortion.txt:58
print_codec
static void print_codec(const AVCodec *c)
Definition: cmdutils.c:1388
AVCodec::capabilities
int capabilities
Codec capabilities.
Definition: codec.h:216
w
uint8_t w
Definition: llviddspenc.c:39
Rate
Rate
G723.1 rate values.
Definition: g723_1.h:72
OPT_INPUT
#define OPT_INPUT
Definition: cmdutils.h:178
even
Tag MUST be even
Definition: snow.txt:206
sources
Note except for filters that can have queued frames and sources
Definition: filter_design.txt:285
AVPixFmtDescriptor::name
const char * name
Definition: pixdesc.h:82
AVOption
AVOption.
Definition: opt.h:248
HAS_ARG
#define HAS_ARG
Definition: cmdutils.h:161
OptionGroupList::groups
OptionGroup * groups
Definition: cmdutils.h:329
b
#define b
Definition: input.c:41
chroma
static av_always_inline void chroma(WaveformContext *s, AVFrame *in, AVFrame *out, int component, int intensity, int offset_y, int offset_x, int column, int mirror, int jobnr, int nb_jobs)
Definition: vf_waveform.c:1624
table
static const uint16_t table[]
Definition: prosumer.c:206
likely
#define likely(x)
Definition: asm.h:33
OptionDef::dst_ptr
void * dst_ptr
Definition: cmdutils.h:181
OptionGroupList::nb_groups
int nb_groups
Definition: cmdutils.h:330
data
const char data[16]
Definition: mxf.c:142
linear
static int linear(InterplayACMContext *s, unsigned ind, unsigned col)
Definition: interplayacm.c:121
av_pix_fmt_desc_next
const AVPixFmtDescriptor * av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev)
Iterate over all pixel format descriptors known to libavutil.
Definition: pixdesc.c:2580
format_opts
AVDictionary * format_opts
Definition: cmdutils.c:70
avio_enum_protocols
const char * avio_enum_protocols(void **opaque, int output)
Iterate through names of available protocols.
Definition: protocols.c:114
half
static uint8_t half(int a, int b)
Definition: mobiclip.c:541
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:408
integer
int integer
Definition: swresample_internal.h:37
convert
Definition: convert.py:1
AV_DICT_IGNORE_SUFFIX
#define AV_DICT_IGNORE_SUFFIX
Return first entry in a dictionary whose first part corresponds to the search key,...
Definition: dict.h:70
possible
the frame and frame reference mechanism is intended to as much as possible
Definition: filter_design.txt:45
av_mallocz_array
void * av_mallocz_array(size_t nmemb, size_t size)
Definition: mem.c:190
FLAGS
#define FLAGS
Definition: cmdutils.c:540
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:210
F
#define F(x)
base
uint8_t base
Definition: vp3data.h:141
fc
#define fc(width, name, range_min, range_max)
Definition: cbs_av1.c:551
avresample.h
OptionGroup::swr_opts
AVDictionary * swr_opts
Definition: cmdutils.h:319
allocate
#define allocate(name, size)
Definition: cbs_h2645.c:432
show_help_children
void show_help_children(const AVClass *class, int flags)
Show help for all options with given flags in class and all its children.
Definition: cmdutils.c:203
AVOption::flags
int flags
Definition: opt.h:277
av_get_bits_per_pixel
int av_get_bits_per_pixel(const AVPixFmtDescriptor *pixdesc)
Return the number of bits per pixel used by the pixel format described by pixdesc.
Definition: pixdesc.c:2525
SHOW_COPYRIGHT
#define SHOW_COPYRIGHT
Definition: cmdutils.c:1099
max
#define max(a, b)
Definition: cuda_runtime.h:33
mathematics.h
filter
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce then the filter should push the output frames on the output link immediately As an exception to the previous rule if the input frame is enough to produce several output frames then the filter needs output only at least one per link The additional frames can be left buffered in the filter
Definition: filter_design.txt:228
av_bsf_iterate
const AVBitStreamFilter * av_bsf_iterate(void **opaque)
Iterate over all registered bitstream filters.
Definition: bitstream_filters.c:67
AVDictionary
Definition: dict.c:30
Frame
Definition: ffplay.c:155
av_get_cpu_flags
int av_get_cpu_flags(void)
Return the flags which specify extensions supported by the CPU.
Definition: cpu.c:95
subbands
subbands
Definition: aptx.h:39
processed
status_in is a status change that must be taken into account after all frames in fifo have been processed
Definition: filter_design.txt:159
hide_banner
int hide_banner
Definition: cmdutils.c:74
config_props
static int config_props(AVFilterLink *outlink)
Definition: aeval.c:222
put_pixel
static void put_pixel(uint16_t *dst, ptrdiff_t linesize, const int16_t *in, int bits_per_raw_sample)
Add bias value, clamp and output pixels of a slice.
Definition: proresdsp.c:41
though
though
Definition: snow.txt:1
AV_OPT_FLAG_FILTERING_PARAM
#define AV_OPT_FLAG_FILTERING_PARAM
a generic parameter which can be set by the user for filtering
Definition: opt.h:294
Makefile
s EdgeDetect Foobar g libavfilter vf_edgedetect c libavfilter vf_foobar c edit libavfilter Makefile
Definition: writing_filters.txt:20
FF_FILTER_FORWARD_STATUS_BACK
#define FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink)
Forward the status on an output link to an input link.
Definition: filters.h:199
H0
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 H0
Definition: snow.txt:554
ff_thread_await_progress
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them. reget_buffer() and buffer age optimizations no longer work. *The contents of buffers must not be written to after ff_thread_report_progress() has been called on them. This includes draw_edges(). Porting codecs to frame threading
quality
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But a word about quality
Definition: rate_distortion.txt:12
AVOutputFormat::subtitle_codec
enum AVCodecID subtitle_codec
default subtitle codec
Definition: avformat.h:503
D
D(D(float, sse)
Definition: rematrix_init.c:28
OptionDef
Definition: cmdutils.h:158
AVUNERROR
#define AVUNERROR(e)
Definition: error.h:44
av_bsf_get_by_name
const AVBitStreamFilter * av_bsf_get_by_name(const char *name)
Definition: bitstream_filters.c:84
AVInputFormat::long_name
const char * long_name
Descriptive name for the format, meant to be more human-readable than name.
Definition: avformat.h:652
bit
#define bit(string, value)
Definition: cbs_mpeg2.c:58
A
#define A(x)
Definition: vp56_arith.h:28
exit_program
void exit_program(int ret)
Wraps exit with a program-specific cleanup routine.
Definition: cmdutils.c:133
InputStream
Definition: ffmpeg.h:300
Filter
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 Filter
Definition: snow.txt:554
ff_inlink_consume_frame
int ff_inlink_consume_frame(AVFilterLink *link, AVFrame **rframe)
Take a frame from the link's FIFO and update the link's stats.
Definition: avfilter.c:1494
av_max_alloc
void av_max_alloc(size_t max)
Set the maximum size that may be allocated in one block.
Definition: mem.c:73
parse_number_or_die
double parse_number_or_die(const char *context, const char *numstr, int type, double min, double max)
Parse a string and return its corresponding value as a double.
Definition: cmdutils.c:141
return
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a it should return
Definition: filter_design.txt:264
av_guess_format
ff_const59 AVOutputFormat * av_guess_format(const char *short_name, const char *filename, const char *mime_type)
Return the output format in the list of registered output formats which best matches the provided par...
Definition: format.c:51
avcodec_find_decoder_by_name
AVCodec * avcodec_find_decoder_by_name(const char *name)
Find a registered decoder with the specified name.
Definition: allcodecs.c:974
rgb
Definition: rpzaenc.c:58
print_error
void print_error(const char *filename, int err)
Print an error message to stderr, indicating filename and a human readable description of the error c...
Definition: cmdutils.c:1084
some
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that some(invalid) inputs can trigger overflows(undefined behavior). In these cases
decoder
static const chunk_decoder decoder[8]
Definition: dfa.c:330
OptionGroupList
A list of option groups that all have the same group type (e.g.
Definition: cmdutils.h:326
reasons
if it could not for temporary reasons
Definition: filter_design.txt:265
fail
#define fail()
Definition: checkasm.h:133
resolution
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted then it is pulled from the input slice through the input converter and horizontal scaler The result is also stored in the ring buffer to serve future vertical scaler requests When no more output can be generated because lines from a future slice would be then all remaining lines in the current slice are horizontally scaled and put in the ring buffer[This is done for luma and chroma, each with possibly different numbers of lines per picture.] Input to YUV Converter When the input to the main path is not planar bits per component YUV or bit it is converted to planar bit YUV Two sets of converters exist for this the other leaves the full chroma resolution
Definition: swscale.txt:54
av_strerror
int av_strerror(int errnum, char *errbuf, size_t errbuf_size)
Put a description of the AVERROR code errnum in errbuf.
Definition: error.c:105
show_decoders
int show_decoders(void *optctx, const char *opt, const char *arg)
Print a listing containing all the decoders supported by the program.
Definition: cmdutils.c:1645
av_output_video_device_next
AVOutputFormat * av_output_video_device_next(AVOutputFormat *d)
Video output devices iterator.
Definition: alldevices.c:138
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:140
frames
if it could not because there are no more frames
Definition: filter_design.txt:266
relevant
status_out is the status that have been taken into it is final when it is not The typical task of an activate callback is to first check the backward status of output and if relevant forward it to the corresponding input if relevant
Definition: filter_design.txt:165
SHOW_CONFIG
#define SHOW_CONFIG
Definition: cmdutils.c:1098
av_filter_iterate
const AVFilter * av_filter_iterate(void **opaque)
Iterate over all registered filters.
Definition: allfilters.c:534
ff_thread_get_buffer
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec caps_internal and use ff_thread_get_buffer() to allocate frames. The frames must then be freed with ff_thread_release_buffer(). Otherwise decode directly into the user-supplied frames. Call ff_thread_report_progress() after some part of the current picture has decoded. A good place to put this is where draw_horiz_band() is called - add this if it isn 't called anywhere
IA
#define IA(x)
Definition: cast5.c:26
av_parse_cpu_caps
int av_parse_cpu_caps(unsigned *flags, const char *s)
Parse CPU caps from a string and update the given AV_CPU_* flags based on that.
Definition: cpu.c:196
tables
Writing a table generator This documentation is preliminary Parts of the API are not good and should be changed Basic concepts A table generator consists of two *_tablegen c and *_tablegen h The h file will provide the variable declarations and initialization code for the tables
Definition: tablegen.txt:10
OptionParseContext
Definition: cmdutils.h:333
future
FFmpeg s bug feature request tracker new issues and changes to existing issues can be done through a web interface Issues can be different kinds of things we want to keep track of but that do not belong into the source tree itself This includes bug feature requests and license violations We might add more items to this list in the future
Definition: issue_tracker.txt:13
avcodec_find_encoder
AVCodec * avcodec_find_encoder(enum AVCodecID id)
Find a registered encoder with a matching codec ID.
Definition: allcodecs.c:941
AVERROR_OPTION_NOT_FOUND
#define AVERROR_OPTION_NOT_FOUND
Option not found.
Definition: error.h:61
AV_BPRINT_SIZE_AUTOMATIC
#define AV_BPRINT_SIZE_AUTOMATIC
Option
An option extracted from the commandline.
Definition: cmdutils.h:287
variant
Definition: hls.c:180
val
static double val(void *priv, double ch)
Definition: aeval.c:76
type
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf type
Definition: writing_filters.txt:86
update
static av_always_inline void update(SilenceDetectContext *s, AVFrame *insamples, int is_silence, int current_sample, int64_t nb_samples_notify, AVRational time_base)
Definition: af_silencedetect.c:78
pts
static int64_t pts
Definition: transcode_aac.c:652
account
status_out is the status that have been taken into account
Definition: filter_design.txt:160
sws_get_class
const AVClass * sws_get_class(void)
Get the AVClass for swsContext.
Definition: options.c:95
us
#define us(width, name, range_min, range_max, subs,...)
Definition: cbs_h2645.c:278
AV_PKT_DATA_DISPLAYMATRIX
@ AV_PKT_DATA_DISPLAYMATRIX
This side data contains a 3x3 transformation matrix describing an affine transformation that needs to...
Definition: packet.h:108
OptionGroup::nb_opts
int nb_opts
Definition: cmdutils.h:313
av_opt_set
int av_opt_set(void *obj, const char *name, const char *val, int search_flags)
Definition: opt.c:465
show_muxdemuxers
show_muxdemuxers
Definition: cmdutils.c:76
OPT_STRING
#define OPT_STRING
Definition: cmdutils.h:164
OptionGroupList::group_def
const OptionGroupDef * group_def
Definition: cmdutils.h:327
AVFILTER_FLAG_DYNAMIC_INPUTS
#define AVFILTER_FLAG_DYNAMIC_INPUTS
The number of the filter inputs is not determined just by AVFilter.inputs.
Definition: avfilter.h:106
fast
static int fast
Definition: ffplay.c:334
OptionDef::help
const char * help
Definition: cmdutils.h:185
AVRational::num
int num
Numerator.
Definition: rational.h:59
idct
static void idct(int16_t block[64])
Definition: 4xm.c:164
InputFile
Definition: ffmpeg.h:400
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
show_help_bsf
static void show_help_bsf(const char *name)
Definition: cmdutils.c:1977
OptionGroupDef
Definition: cmdutils.h:293
reverse
static uint32_t reverse(uint32_t num, int bits)
Definition: speedhqenc.c:51
resample
static int resample(ResampleContext *c, void *dst, const void *src, int *consumed, int src_size, int dst_size, int update_ctx, int nearest_neighbour)
Definition: resample.c:259
qlogs
spatial_decomposition_type s header_state qlog s header_state mv_scale s header_state qbias s header_state block_max_depth s header_state qlogs
Definition: snow.txt:85
LH
#define LH(psrc)
Definition: generic_macros_msa.h:93
AVDeviceInfoList::devices
AVDeviceInfo ** devices
list of autodetected devices
Definition: avdevice.h:466
aligned
static int aligned(int val)
Definition: dashdec.c:168
C
s EdgeDetect Foobar g libavfilter vf_edgedetect c libavfilter vf_foobar c edit libavfilter and add an entry for foobar following the pattern of the other filters edit libavfilter allfilters and add an entry for foobar following the pattern of the other filters configure make j< whatever > ffmpeg ffmpeg i you should get a foobar png with Lena edge detected That s your new playground is ready Some little details about what s going which in turn will define variables for the build system and the C
Definition: writing_filters.txt:58
check_stream_specifier
int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
Check if the given stream matches a stream specifier.
Definition: cmdutils.c:2095
SHOW_VERSION
#define SHOW_VERSION
Definition: cmdutils.c:1097
first
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But first
Definition: rate_distortion.txt:12
avassert.h
variables
FFmpeg currently uses a custom build this text attempts to document some of its obscure features and options Makefile variables
Definition: build_system.txt:7
description
Tag description
Definition: snow.txt:206
AV_LOG_TRACE
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
Definition: log.h:220
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
print_buildconf
static void print_buildconf(int flags, int level)
Definition: cmdutils.c:1157
initFilter
static av_cold int initFilter(int16_t **outFilter, int32_t **filterPos, int *outFilterSize, int xInc, int srcW, int dstW, int filterAlign, int one, int flags, int cpu_flags, SwsVector *srcFilter, SwsVector *dstFilter, double param[2], int srcPos, int dstPos)
Definition: utils.c:337
AV_CODEC_CAP_EXPERIMENTAL
#define AV_CODEC_CAP_EXPERIMENTAL
Codec is experimental and is thus avoided in favor of non experimental encoders.
Definition: codec.h:100
AVInputFormat
Definition: avformat.h:640
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVInputFormat::extensions
const char * extensions
If extensions are defined, then no probe is done.
Definition: avformat.h:666
OptionGroup::codec_opts
AVDictionary * codec_opts
Definition: cmdutils.h:315
ff_set_common_formats
int ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:587
set
static void set(uint8_t *a[], int ch, int index, int ch_count, enum AVSampleFormat f, double v)
Definition: swresample.c:59
expand_filename_template
static void expand_filename_template(AVBPrint *bp, const char *template, struct tm *tm)
Definition: cmdutils.c:946
check_options
static void check_options(const OptionDef *po)
Definition: cmdutils.c:493
media_type_string
#define media_type_string
Definition: cmdutils.h:617
await_progress
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report await_progress()
ff_thread_report_progress
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread_frame.c:590
check
#define check(x, y, S, v)
Definition: motion_est_template.c:405
YUV
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like YUV
Definition: swscale.txt:38
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
postprocess.h
class
#define class
Definition: math.h:25
av_log_format_line
void av_log_format_line(void *ptr, int level, const char *fmt, va_list vl, char *line, int line_size, int *print_prefix)
Format a line of log the same way as the default callback.
Definition: log.c:328
decode
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
Definition: decode_audio.c:71
ff_outlink_set_status
static void ff_outlink_set_status(AVFilterLink *link, int status, int64_t pts)
Set the status field of a link from the source filter.
Definition: filters.h:189
OPT_INT
#define OPT_INT
Definition: cmdutils.h:167
ff_inlink_request_frame
void ff_inlink_request_frame(AVFilterLink *link)
Mark that a frame is wanted on the link.
Definition: avfilter.c:1620
input_streams
InputStream ** input_streams
Definition: ffmpeg.c:148
width
#define width
MC
#define MC(PEL, DIR, WIDTH)
Definition: hevcdsp_mips.h:26
AVCodecDescriptor
This struct describes the properties of a single codec described by an AVCodecID.
Definition: codec_desc.h:38
s
#define s(width, name)
Definition: cbs_vp9.c:257
OptionDef::argname
const char * argname
Definition: cmdutils.h:186
split_commandline
int split_commandline(OptionParseContext *octx, int argc, char *argv[], const OptionDef *options, const OptionGroupDef *groups, int nb_groups)
Split the commandline into an intermediate form convenient for further processing.
Definition: cmdutils.c:749
AV_OPT_FLAG_ENCODING_PARAM
#define AV_OPT_FLAG_ENCODING_PARAM
a generic parameter which can be set by the user for muxing or encoding
Definition: opt.h:278
resample_opts
AVDictionary * resample_opts
Definition: cmdutils.c:70
offsets
static const int offsets[]
Definition: hevc_pel.c:34
av_realloc_array
void * av_realloc_array(void *ptr, size_t nmemb, size_t size)
Definition: mem.c:198
floor
static __device__ float floor(float a)
Definition: cuda_runtime.h:173
AVInputFormat::name
const char * name
A comma separated list of short names for the format.
Definition: avformat.h:645
SWS_FULL_CHR_H_INP
#define SWS_FULL_CHR_H_INP
Definition: swscale.h:81
g
const char * g
Definition: vf_curves.c:117
AVDictionaryEntry::key
char * key
Definition: dict.h:82
Option::key
const char * key
Definition: cmdutils.h:289
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:202
avfilter_pad_count
int avfilter_pad_count(const AVFilterPad *pads)
Get the number of elements in a NULL-terminated array of AVFilterPads (e.g.
Definition: avfilter.c:561
sse
static int sse(MpegEncContext *s, uint8_t *src1, uint8_t *src2, int w, int h, int stride)
Definition: mpegvideo_enc.c:2744
AV_CODEC_CAP_OTHER_THREADS
#define AV_CODEC_CAP_OTHER_THREADS
Codec supports multithreading through a method other than slice- or frame-level multithreading.
Definition: codec.h:122
info
MIPS optimizations info
Definition: mips.txt:2
swr_alloc
av_cold struct SwrContext * swr_alloc(void)
Allocate SwrContext.
Definition: options.c:149
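A minimal sketch of the allocate/configure/init/free cycle around swr_alloc(); the sample rates, layouts and formats below are arbitrary example values.

#include "libswresample/swresample.h"
#include "libavutil/opt.h"
#include "libavutil/channel_layout.h"
#include "libavutil/samplefmt.h"

static struct SwrContext *make_resampler(void)
{
    struct SwrContext *swr = swr_alloc();
    if (!swr)
        return NULL;
    /* configure through AVOptions, then initialize */
    av_opt_set_int(swr, "in_channel_layout",  AV_CH_LAYOUT_STEREO, 0);
    av_opt_set_int(swr, "out_channel_layout", AV_CH_LAYOUT_MONO,   0);
    av_opt_set_int(swr, "in_sample_rate",  48000, 0);
    av_opt_set_int(swr, "out_sample_rate", 44100, 0);
    av_opt_set_sample_fmt(swr, "in_sample_fmt",  AV_SAMPLE_FMT_S16,  0);
    av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_FLTP, 0);
    if (swr_init(swr) < 0)
        swr_free(&swr);   /* frees and sets swr to NULL */
    return swr;
}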
bits
uint8_t bits
Definition: vp3data.h:141
from
const char * from
Definition: jacosubdec.c:65
to
const char * to
Definition: webvttdec.c:34
avresample_get_class
const attribute_deprecated AVClass * avresample_get_class(void)
Definition: options.c:110
AVOutputFormat::audio_codec
enum AVCodecID audio_codec
default audio codec
Definition: avformat.h:501
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
get
static void get(uint8_t *pixels, int stride, int16_t *block)
Definition: proresenc_anatoliy.c:306
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:309
av_get_channel_name
const char * av_get_channel_name(uint64_t channel)
Get the name of a given channel.
Definition: channel_layout.c:249
AVFilter::flags
int flags
A combination of AVFILTER_FLAG_*.
Definition: avfilter.h:188
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:215
ctx
AVFormatContext * ctx
Definition: movenc.c:48
dump_argument
static void dump_argument(const char *a)
Definition: cmdutils.c:469
report_file_level
static int report_file_level
Definition: cmdutils.c:73
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:40
Slice
Definition: magicyuv.c:37
write_fileheader
write_fileheader() adds some minor things like a "this is a generated file" comment and some standard includes.
Definition: tablegen.txt:39
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:92
export
static int export(AVFilterContext *ctx, StreamContext *sc, int input)
Definition: vf_signature.c:570
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:369
parse_options
void parse_options(void *optctx, int argc, char **argv, const OptionDef *options, void(*parse_arg_function)(void *, const char *))
Definition: cmdutils.c:379
AV_OPT_FLAG_BSF_PARAM
#define AV_OPT_FLAG_BSF_PARAM
a generic parameter which can be set by the user for bit stream filtering
Definition: opt.h:292
key
const char * key
Definition: hwcontext_opencl.c:168
SwrContext
The libswresample context.
Definition: swresample_internal.h:95
AVMEDIA_TYPE_DATA
@ AVMEDIA_TYPE_DATA
Opaque data information usually continuous.
Definition: avutil.h:203
XMM_CLOBBERS
#define XMM_CLOBBERS(...)
Definition: asm.h:98
f
#define f(width, name)
Definition: cbs_vp9.c:255
pass
#define pass
Definition: fft_template.c:603
command
static int command(AVFilterContext *ctx, const char *cmd, const char *arg, char *res, int res_len, int flags)
Definition: vf_drawtext.c:873
AV_OPT_FLAG_AUDIO_PARAM
#define AV_OPT_FLAG_AUDIO_PARAM
Definition: opt.h:280
compare_codec_desc
static int compare_codec_desc(const void *a, const void *b)
Definition: cmdutils.c:1507
int32_t
int32_t
Definition: audio_convert.c:194
ff_inlink_make_frame_writable
int ff_inlink_make_frame_writable(AVFilterLink *link, AVFrame **rframe)
Make sure a frame is writable.
Definition: avfilter.c:1538
av_opt_find
const AVOption * av_opt_find(void *obj, const char *name, const char *unit, int opt_flags, int search_flags)
Look for an option in an object.
Definition: opt.c:1661
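A short sketch of probing for an option by name without instantiating a context, using a pointer to the AVClass as the fake object (see AV_OPT_SEARCH_FAKE_OBJ later in this index); the queried option name is only an example.

#include "libavformat/avformat.h"
#include "libavutil/opt.h"

static int format_option_exists(const char *name)
{
    const AVClass *fmt_class = avformat_get_class();
    /* &fmt_class acts as a fake object: only the class is inspected */
    return av_opt_find(&fmt_class, name, NULL, 0, AV_OPT_SEARCH_FAKE_OBJ) != NULL;
}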
arg
const char * arg
Definition: jacosubdec.c:66
callback
static void callback(void *priv_data, int index, uint8_t *buf, int buf_size, int64_t time, enum dshowDeviceType devtype)
Definition: dshow.c:161
if
if(ret)
Definition: filter_design.txt:179
OPT_SPEC
#define OPT_SPEC
Definition: cmdutils.h:175
finish_group
static void finish_group(OptionParseContext *octx, int group_idx, const char *arg)
Definition: cmdutils.c:658
output_streams
OutputStream ** output_streams
Definition: ffmpeg.c:153
H2
vertical halfpel samples are found by H2[y][x]
Definition: snow.txt:421
AV_IS_OUTPUT_DEVICE
#define AV_IS_OUTPUT_DEVICE(category)
Definition: log.h:55
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:108
AVFormatContext
Format I/O context.
Definition: avformat.h:1232
av_log_get_level
int av_log_get_level(void)
Get the current log level.
Definition: log.c:435
show_buildconf
int show_buildconf(void *optctx, const char *opt, const char *arg)
Print the build configuration of the program to stdout.
Definition: cmdutils.c:1203
AV_CODEC_PROP_INTRA_ONLY
#define AV_CODEC_PROP_INTRA_ONLY
Codec uses only intra compression.
Definition: codec_desc.h:72
avfilter_get_by_name
const AVFilter * avfilter_get_by_name(const char *name)
Get a filter definition matching the given name.
Definition: allfilters.c:545
quant_table
static const int16_t quant_table[64]
Definition: intrax8.c:522
init_dynload
void init_dynload(void)
Initialize dynamic library loading.
Definition: cmdutils.c:117
opts
AVDictionary * opts
Definition: movenc.c:50
OptionGroup::format_opts
AVDictionary * format_opts
Definition: cmdutils.h:316
AVStream::codecpar
AVCodecParameters * codecpar
Codec parameters associated with this stream.
Definition: avformat.h:1038
main
int main(int argc, char *argv[])
Definition: avio_list_dir.c:112
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
avcodec_get_class
const AVClass * avcodec_get_class(void)
Get the AVClass for AVCodecContext.
Definition: options.c:311
fabs
static __device__ float fabs(float a)
Definition: cuda_runtime.h:182
NULL
#define NULL
Definition: coverity.c:32
flush
static void flush(AVCodecContext *avctx)
Definition: aacdec_template.c:592
OptionParseContext::global_opts
OptionGroup global_opts
Definition: cmdutils.h:334
Option::opt
const OptionDef * opt
Definition: cmdutils.h:288
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:658
run
uint8_t run
Definition: svq3.c:205
prepare_app_arguments
static void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
Definition: cmdutils.c:285
AVPixFmtDescriptor::nb_components
uint8_t nb_components
The number of components each pixel has, (1-4)
Definition: pixdesc.h:83
push
static void push(HysteresisContext *s, int x, int y, int w)
Definition: vf_hysteresis.c:145
pixel
uint8_t pixel
Definition: tiny_ssim.c:42
swr_get_class
const AVClass * swr_get_class(void)
Get the AVClass for SwrContext.
Definition: options.c:144
frame_wanted_out
The status of output links is stored in the frame_wanted_out field (see doc/filter_design.txt).
Definition: filter_design.txt:148
LIBAVFILTER_VERSION_MICRO
#define LIBAVFILTER_VERSION_MICRO
Definition: version.h:34
transform
static const int8_t transform[32][32]
Definition: hevcdsp.c:27
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVOutputFormat::get_device_list
int(* get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list)
Returns device list with its properties.
Definition: avformat.h:592
coefficients
static double coefficients[8 *8]
Definition: dctref.c:35
next_codec_for_id
static const AVCodec * next_codec_for_id(enum AVCodecID id, void **iter, int encoder)
Definition: cmdutils.c:1495
greater
static int greater(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:158
AVOutputFormat::long_name
const char * long_name
Descriptive name for the format, meant to be more human-readable than name.
Definition: avformat.h:497
show_formats_devices
static int show_formats_devices(void *optctx, const char *opt, const char *arg, int device_only, int muxdemuxers)
Definition: cmdutils.c:1292
H
Halfpel sample positions in the Snow codec interpolation description.
Definition: snow.txt:555
system
FFmpeg currently uses a custom build system
Definition: build_system.txt:1
get_format
static int get_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts)
Definition: qsvdec.c:51
GET_CODEC_NAME
#define GET_CODEC_NAME(id)
Definition: cmdutils.h:625
warned_cfg
static int warned_cfg
Definition: cmdutils.c:1094
av_log_set_flags
void av_log_set_flags(int arg)
Definition: log.c:445
src
#define src
Definition: vp8dsp.c:255
parseutils.h
INDENT
#define INDENT
Definition: cmdutils.c:1096
sws_alloc_context
struct SwsContext * sws_alloc_context(void)
Allocate an empty SwsContext.
Definition: utils.c:1093
show_muxers
int show_muxers(void *optctx, const char *opt, const char *arg)
Print a listing containing all the muxers supported by the program (including devices).
Definition: cmdutils.c:1360
L0
#define L0
Definition: hevcdec.h:60
init_opts
void init_opts(void)
Initialize the cmdutils option system, in particular allocate the *_opts contexts.
Definition: cmdutils.c:82
AVBitStreamFilter::priv_class
const AVClass * priv_class
A class for the private data, used to declare bitstream filter private AVOptions.
Definition: bsf.h:117
OPT_INT64
#define OPT_INT64
Definition: cmdutils.h:170
Prediction
Prediction
Definition: magicyuv.c:42
AV_CODEC_CAP_VARIABLE_FRAME_SIZE
#define AV_CODEC_CAP_VARIABLE_FRAME_SIZE
Audio encoder supports receiving a different number of samples in each call.
Definition: codec.h:129
av_cpu_max_align
size_t av_cpu_max_align(void)
Get the maximum data alignment that may be required by FFmpeg.
Definition: cpu.c:323
av_parse_time
int av_parse_time(int64_t *timeval, const char *timestr, int duration)
Parse timestr and return in *time a corresponding number of microseconds.
Definition: parseutils.c:587
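A small sketch of both modes of av_parse_time(); the input strings are arbitrary examples.

#include <stdio.h>
#include <inttypes.h>
#include "libavutil/parseutils.h"

int main(void)
{
    int64_t t;
    /* duration != 0: parse a duration, result in microseconds */
    if (av_parse_time(&t, "01:02:03.5", 1) >= 0)
        printf("duration: %" PRId64 " us\n", t);
    /* duration == 0: parse an absolute date/time */
    if (av_parse_time(&t, "2020-01-01T00:00:00Z", 0) >= 0)
        printf("date: %" PRId64 " us since the Unix epoch\n", t);
    return 0;
}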
abs
#define abs(x)
Definition: cuda_runtime.h:35
filter_frame
static int filter_frame(DBEDecodeContext *s, AVFrame *frame)
Definition: dolby_e.c:1049
AVOutputFormat::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: avformat.h:519
nb_input_streams
int nb_input_streams
Definition: ffmpeg.c:149
write_option
static int write_option(void *optctx, const OptionDef *po, const char *opt, const char *arg)
Definition: cmdutils.c:291
av_get_standard_channel_layout
int av_get_standard_channel_layout(unsigned index, uint64_t *layout, const char **name)
Get the value and name of a standard channel layout.
Definition: channel_layout.c:285
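A minimal loop over the table of standard channel layouts; it assumes only that the function returns a negative value once the index runs past the table.

#include <stdio.h>
#include <inttypes.h>
#include "libavutil/channel_layout.h"

static void list_standard_layouts(void)
{
    uint64_t layout;
    const char *name;
    for (unsigned i = 0; av_get_standard_channel_layout(i, &layout, &name) >= 0; i++)
        printf("%-24s 0x%" PRIx64 "\n", name, layout);
}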
OptionGroup::opts
Option * opts
Definition: cmdutils.h:312
AVPixFmtDescriptor::flags
uint64_t flags
Combination of AV_PIX_FMT_FLAG_...
Definition: pixdesc.h:106
OptionGroup
Definition: cmdutils.h:308
ff_inlink_acknowledge_status
int ff_inlink_acknowledge_status(AVFilterLink *link, int *rstatus, int64_t *rpts)
Test and acknowledge the change of status on the link.
Definition: avfilter.c:1449
av_output_audio_device_next
AVOutputFormat * av_output_audio_device_next(AVOutputFormat *d)
Audio output devices iterator.
Definition: alldevices.c:133
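A sketch of walking the audio output devices with this iterator; avdevice_register_all() must have been called first, and only the name fields are printed.

#include <stdio.h>
#include "libavdevice/avdevice.h"

static void list_audio_output_devices(void)
{
    AVOutputFormat *ofmt = NULL;
    avdevice_register_all();
    while ((ofmt = av_output_audio_device_next(ofmt)))
        printf("%s (%s)\n", ofmt->name, ofmt->long_name ? ofmt->long_name : "");
}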
diff_bytes
static void diff_bytes(HYuvContext *s, uint8_t *dst, const uint8_t *src0, const uint8_t *src1, int w)
Definition: huffyuvenc.c:41
Range
Definition: vf_colorbalance.c:38
swresample.h
index
int index
Definition: gxfenc.c:89
H1
Halfpel sample positions (H1) in the Snow codec interpolation description.
Definition: snow.txt:554
input_files
InputFile ** input_files
Definition: ffmpeg.c:150
AV_OPT_SEARCH_FAKE_OBJ
#define AV_OPT_SEARCH_FAKE_OBJ
The obj passed to av_opt_find() is fake – only a double pointer to AVClass instead of a required poin...
Definition: opt.h:568
av_bprint_is_complete
static int av_bprint_is_complete(const AVBPrint *buf)
Test if the print buffer is complete (not truncated).
Definition: bprint.h:185
AVCodecID
AVCodecID
Identify the syntax and semantics of the bitstream.
Definition: codec_id.h:46
av_input_video_device_next
AVInputFormat * av_input_video_device_next(AVInputFormat *d)
Video input devices iterator.
Definition: alldevices.c:128
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
AVFILTER_FLAG_DYNAMIC_OUTPUTS
#define AVFILTER_FLAG_DYNAMIC_OUTPUTS
The number of the filter outputs is not determined just by AVFilter.outputs.
Definition: avfilter.h:112
methods
FFmpeg multithreading methods
Definition: multithreading.txt:2
AV_CODEC_CAP_CHANNEL_CONF
#define AV_CODEC_CAP_CHANNEL_CONF
Codec should fill in channel configuration and samplerate instead of container.
Definition: codec.h:104
ff_thread_release_buffer
void ff_thread_release_buffer(AVCodecContext *avctx, ThreadFrame *f)
Wrapper around release_buffer() for frame-multithreaded codecs.
Definition: pthread_frame.c:1104
locate_option
int locate_option(int argc, char **argv, const OptionDef *options, const char *optname)
Return index of option opt in argv or 0 if not found.
Definition: cmdutils.c:443
av_codec_is_decoder
int av_codec_is_decoder(const AVCodec *codec)
Definition: utils.c:79
interleave
static void interleave(uint8_t *dst, uint8_t *src, int w, int h, int dst_linesize, int src_linesize, enum FilterMode mode, int swap)
Definition: vf_il.c:114
FF_FILTER_FORWARD_STATUS_ALL
FF_FILTER_FORWARD_STATUS_ALL(outlink, filter)
codec_opts
AVDictionary * codec_opts
Definition: cmdutils.c:70
options
const OptionDef options[]
eval.h
show_help_demuxer
static void show_help_demuxer(const char *name)
Definition: cmdutils.c:1852
minimum
static float minimum(float src0, float src1)
Definition: dnn_backend_native_layer_mathbinary.c:48
blur
static void blur(uint8_t *dst, int dst_step, const uint8_t *src, int src_step, int len, int radius, int pixsize)
Definition: vf_boxblur.c:160
H3
vertical+horizontal halfpel samples are found by H3[y][x]
Definition: snow.txt:427
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
get_audio_buffer
static AVFrame * get_audio_buffer(AVFilterLink *inlink, int nb_samples)
Definition: avf_concat.c:208
AV_SAMPLE_FMT_NB
@ AV_SAMPLE_FMT_NB
Number of sample formats. DO NOT USE if linking dynamically.
Definition: samplefmt.h:74
show_help
int show_help(void *optctx, const char *opt, const char *arg)
Generic -h handler common to all fftools.
Definition: cmdutils.c:1996
AVMediaType
AVMediaType
Definition: avutil.h:199
av_log_set_callback
void av_log_set_callback(void(*callback)(void *, int, const char *, va_list))
Set the logging callback.
Definition: log.c:455
ff_inlink_set_status
void ff_inlink_set_status(AVFilterLink *link, int status)
Set the status on an input link.
Definition: avfilter.c:1628
avformat_match_stream_specifier
int avformat_match_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
Check if the stream st contained in s is matched by the stream specifier spec.
Definition: utils.c:5326
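A hedged sketch of testing a stream specifier such as "a:0" against every stream of an already-opened AVFormatContext; a negative return value signals an invalid specifier.

#include <stdio.h>
#include "libavformat/avformat.h"

static void print_matching_streams(AVFormatContext *ic, const char *spec)
{
    for (unsigned i = 0; i < ic->nb_streams; i++) {
        int ret = avformat_match_stream_specifier(ic, ic->streams[i], spec);
        if (ret < 0)
            return;     /* invalid specifier */
        if (ret > 0)
            printf("stream #%u matches \"%s\"\n", i, spec);
    }
}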
scroll
static void scroll(AVFilterContext *ctx, AVFrame *in, AVFrame *out)
Definition: vf_scroll.c:111
copy
static void copy(const float *p1, float *p2, const int length)
Definition: vf_vaguedenoiser.c:194
Header
@ Header
Definition: mxfdec.c:65
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:443
AVClass::category
AVClassCategory category
Category used for visualization (like color) This is only set if the category is equal for all object...
Definition: log.h:133
output_files
OutputFile ** output_files
Definition: ffmpeg.c:155
cpu.h
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:119
FFMAX
#define FFMAX(a, b)
Definition: common.h:103
get_preset_file
FILE * get_preset_file(char *filename, size_t filename_size, const char *preset_name, int is_path, const char *codec_name)
Get a file corresponding to a preset file.
Definition: cmdutils.c:2045
PRINT_CODEC_SUPPORTED
#define PRINT_CODEC_SUPPORTED(codec, field, type, list_name, term, get_name)
Definition: cmdutils.c:1375
AV_SAMPLE_FMT_NONE
@ AV_SAMPLE_FMT_NONE
Definition: samplefmt.h:59
sample
#define sample
Definition: flacdsp_template.c:44
uninit_opts
void uninit_opts(void)
Uninitialize the cmdutils option system, in particular free the *_opts contexts and their contents.
Definition: cmdutils.c:87
size
int size
Definition: twinvq_data.h:10344
print_codecs
static void print_codecs(int encoder)
Definition: cmdutils.c:1606
section
Definition: ffprobe.c:141
swr_free
av_cold void swr_free(SwrContext **ss)
Free the given SwrContext and set the pointer to NULL.
Definition: swresample.c:137
AV_PIX_FMT_FLAG_BITSTREAM
#define AV_PIX_FMT_FLAG_BITSTREAM
All values of a component are bit-wise packed end to end.
Definition: pixdesc.h:136
av_frame_is_writable
int av_frame_is_writable(AVFrame *frame)
Check if the frame data is writable.
Definition: frame.c:594
FFDIFFSIGN
#define FFDIFFSIGN(x, y)
Comparator.
Definition: common.h:101
setup_find_stream_info_opts
AVDictionary ** setup_find_stream_info_opts(AVFormatContext *s, AVDictionary *codec_opts)
Setup AVCodecContext options for avformat_find_stream_info().
Definition: cmdutils.c:2161
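A minimal sketch of the intended call pattern, assuming an opened AVFormatContext and a dictionary of user codec options; the per-stream dictionaries are freed with the stream count captured before avformat_find_stream_info() runs, since probing can add streams.

#include "libavformat/avformat.h"
#include "libavutil/dict.h"
#include "libavutil/mem.h"
#include "cmdutils.h"

static int probe_streams(AVFormatContext *ic, AVDictionary *user_codec_opts)
{
    unsigned orig_nb_streams = ic->nb_streams;
    AVDictionary **opts = setup_find_stream_info_opts(ic, user_codec_opts);
    int err = avformat_find_stream_info(ic, opts);
    for (unsigned i = 0; opts && i < orig_nb_streams; i++)
        av_dict_free(&opts[i]);
    av_freep(&opts);
    return err;
}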
GET_SAMPLE_FMT_NAME
#define GET_SAMPLE_FMT_NAME(sample_fmt)
Definition: cmdutils.h:628
swscale
static int swscale(SwsContext *c, const uint8_t *src[], int srcStride[], int srcSliceY, int srcSliceH, uint8_t *dst[], int dstStride[])
Definition: swscale.c:238
av_demuxer_iterate
const AVInputFormat * av_demuxer_iterate(void **opaque)
Iterate over all registered demuxers.
Definition: allformats.c:558
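A short sketch of the demuxer iteration loop, printing just the short name of each registered demuxer.

#include <stdio.h>
#include "libavformat/avformat.h"

static void list_demuxers(void)
{
    void *opaque = NULL;
    const AVInputFormat *ifmt;
    while ((ifmt = av_demuxer_iterate(&opaque)))
        printf("%s\n", ifmt->name);
}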
printf
printf("static const uint8_t my_array[100] = {\n")
show_protocols
int show_protocols(void *optctx, const char *opt, const char *arg)
Print a listing containing all the protocols supported by the program.
Definition: cmdutils.c:1669
av_log_get_flags
int av_log_get_flags(void)
Definition: log.c:450
avdevice.h
AVFilter::description
const char * description
A description of the filter.
Definition: avfilter.h:156
avdevice_free_list_devices
void avdevice_free_list_devices(AVDeviceInfoList **device_list)
Convenient function to free result of avdevice_list_devices().
Definition: avdevice.c:145
header
static const uint8_t header[24]
Definition: sdr2.c:67
AV_OPT_SEARCH_CHILDREN
#define AV_OPT_SEARCH_CHILDREN
Search in possible children of the given object first.
Definition: opt.h:560
split
static char * split(char *message, char delim)
Definition: af_channelmap.c:81
CONFIG_FOOBAR_FILTER
#define CONFIG_FOOBAR_FILTER
av_input_audio_device_next
AVInputFormat * av_input_audio_device_next(AVInputFormat *d)
Audio input devices iterator.
Definition: alldevices.c:123
encode
static void encode(AVCodecContext *ctx, AVFrame *frame, AVPacket *pkt, FILE *output)
Definition: encode_audio.c:95
height
#define height
FFMIN
#define FFMIN(a, b)
Definition: common.h:105
H
#define H
Definition: pixlet.c:39
AV_CODEC_CAP_SLICE_THREADS
#define AV_CODEC_CAP_SLICE_THREADS
Codec supports slice-based (or partition-based) multithreading.
Definition: codec.h:112
line
Definition: graph2dot.c:48
FF_FILTER_FORWARD_WANTED
FF_FILTER_FORWARD_WANTED(outlink, inlink)
attributes.h
av_pix_fmt_desc_get_id
enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc)
Definition: pixdesc.c:2592
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
show_devices
int show_devices(void *optctx, const char *opt, const char *arg)
Print a listing containing all the devices supported by the program.
Definition: cmdutils.c:1370
av_strstart
int av_strstart(const char *str, const char *pfx, const char **ptr)
Return non-zero if pfx is a prefix of str.
Definition: avstring.c:34
N
#define N
Definition: af_mcompand.c:54
fact
static double fact(double i)
Definition: af_aiir.c:952
va_copy
#define va_copy(dst, src)
Definition: va_copy.h:31
version
version
Definition: libkvazaar.c:326
predictor
static void predictor(uint8_t *src, ptrdiff_t size)
Definition: exrenc.c:163
AVDeviceInfo::device_description
char * device_description
Human-friendly name.
Definition: avdevice.h:459
avdevice_list_output_sinks
int avdevice_list_output_sinks(AVOutputFormat *device, const char *device_name, AVDictionary *device_options, AVDeviceInfoList **device_list)
Definition: avdevice.c:134
show_pix_fmts
int show_pix_fmts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the pixel formats supported by the program.
Definition: cmdutils.c:1745
interpolation
static int interpolation(DeclickChannel *c, const double *src, int ar_order, double *acoefficients, int *index, int nb_errors, double *auxiliary, double *interpolated)
Definition: af_adeclick.c:365
Y
#define Y
Definition: boxblur.h:38
help
static void help(void)
Definition: dct.c:451
AVOutputFormat::mime_type
const char * mime_type
Definition: avformat.h:498
XMM_CLOBBERS_ONLY
#define XMM_CLOBBERS_ONLY(...)
Definition: asm.h:99
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:205
show_sample_fmts
int show_sample_fmts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the sample formats supported by the program.
Definition: cmdutils.c:1807
avcodec_descriptor_next
const AVCodecDescriptor * avcodec_descriptor_next(const AVCodecDescriptor *prev)
Iterate over all codec descriptors known to libavcodec.
Definition: codec_desc.c:3507
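A minimal sketch of walking all codec descriptors; filtering on AV_CODEC_PROP_LOSSLESS here is only an example of using the property flags.

#include <stdio.h>
#include "libavcodec/codec_desc.h"

static void list_lossless_codecs(void)
{
    const AVCodecDescriptor *desc = NULL;
    while ((desc = avcodec_descriptor_next(desc)))
        if (desc->props & AV_CODEC_PROP_LOSSLESS)
            printf("%s (%s)\n", desc->name, desc->long_name ? desc->long_name : "");
}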
avcodec_find_encoder_by_name
AVCodec * avcodec_find_encoder_by_name(const char *name)
Find a registered encoder with the specified name.
Definition: allcodecs.c:969
AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
#define AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
Some filters support a generic "enable" expression option that can be used to enable or disable a fil...
Definition: avfilter.h:126
show_banner
void show_banner(int argc, char **argv, const OptionDef *options)
Print the program banner to stderr.
Definition: cmdutils.c:1183
av_codec_is_encoder
int av_codec_is_encoder(const AVCodec *codec)
Definition: utils.c:74
program_exit
static void(* program_exit)(int ret)
Definition: cmdutils.c:126
flag
#define flag(name)
Definition: cbs_av1.c:553
register_exit
void register_exit(void(*cb)(int ret))
Register a program-specific cleanup routine.
Definition: cmdutils.c:128
GET_CH_LAYOUT_DESC
#define GET_CH_LAYOUT_DESC(ch_layout)
Definition: cmdutils.h:639
AV_CODEC_PROP_LOSSLESS
#define AV_CODEC_PROP_LOSSLESS
Codec supports lossless compression.
Definition: codec_desc.h:82
av_find_input_format
ff_const59 AVInputFormat * av_find_input_format(const char *short_name)
Find AVInputFormat based on the short name of the input format.
Definition: format.c:118
tests
const TestCase tests[]
Definition: fifo_muxer.c:243
less
static int less(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:148
av_log_set_level
void av_log_set_level(int level)
Set the log level.
Definition: log.c:440
Type
Type
Definition: vf_idet.h:29
bprint.h
i
int i
Definition: input.c:407
AV_CODEC_ID_NONE
@ AV_CODEC_ID_NONE
Definition: codec_id.h:47
AVOutputFormat
Definition: avformat.h:490
print_all_libs_info
static void print_all_libs_info(int flags, int level)
Definition: cmdutils.c:1130
round
static av_always_inline av_const double round(double x)
Definition: libm.h:444
OPT_TIME
#define OPT_TIME
Definition: cmdutils.h:176
swr_opts
AVDictionary * swr_opts
Definition: cmdutils.c:69
LIBAVFILTER_VERSION_MINOR
#define LIBAVFILTER_VERSION_MINOR
Definition: version.h:33
available
if no frame is available
Definition: filter_design.txt:166
Code
One code in hash table.
Definition: lzwenc.c:42
display.h
AVSampleFormat
AVSampleFormat
Audio sample formats.
Definition: samplefmt.h:58
delta
float delta
Definition: vorbis_enc_data.h:457
draw_edges
Referenced in doc/multithreading.txt: draw_edges() needs to be called before reporting progress.
filter_codec_opts
AVDictionary * filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id, AVFormatContext *s, AVStream *st, const AVCodec *codec)
Filter out options for given codec.
Definition: cmdutils.c:2103
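A hedged sketch of using filter_codec_opts() while opening a per-stream decoder, roughly the pattern the fftools follow; error handling is abbreviated.

#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libavutil/dict.h"
#include "cmdutils.h"

static int open_stream_decoder(AVFormatContext *ic, AVStream *st,
                               AVDictionary *all_codec_opts, AVCodecContext **dec_ctx)
{
    const AVCodec *dec = avcodec_find_decoder(st->codecpar->codec_id);
    if (!dec)
        return AVERROR_DECODER_NOT_FOUND;
    /* keep only the options that apply to this codec and stream */
    AVDictionary *opts = filter_codec_opts(all_codec_opts, st->codecpar->codec_id,
                                           ic, st, dec);
    *dec_ctx = avcodec_alloc_context3(dec);
    if (!*dec_ctx) {
        av_dict_free(&opts);
        return AVERROR(ENOMEM);
    }
    avcodec_parameters_to_context(*dec_ctx, st->codecpar);
    int ret = avcodec_open2(*dec_ctx, dec, &opts);
    av_dict_free(&opts);
    return ret;
}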
ilog2
ilog2(x) is the rounded-down logarithm of x with basis 2; ilog2(0)=0.
Definition: snow.txt:23
av_toupper
static av_const int av_toupper(int c)
Locale-independent conversion of ASCII characters to uppercase.
Definition: avstring.h:236
AVMEDIA_TYPE_ATTACHMENT
@ AVMEDIA_TYPE_ATTACHMENT
Opaque data information usually sparse.
Definition: avutil.h:205
AV_OPT_FLAG_DECODING_PARAM
#define AV_OPT_FLAG_DECODING_PARAM
a generic parameter which can be set by the user for demuxing or decoding
Definition: opt.h:279
CONFIG_HARDCODED_TABLES
#define CONFIG_HARDCODED_TABLES
Definition: aacps_tablegen_template.c:25
SUINT
#define SUINT
Definition: dct32_template.c:30
uint8_t
uint8_t
Definition: audio_convert.c:194
SHOW_MUXERS
@ SHOW_MUXERS
Definition: cmdutils.c:79
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:237
get_codecs_sorted
static unsigned get_codecs_sorted(const AVCodecDescriptor ***rcodecs)
Definition: cmdutils.c:1516
pix_sum
static int pix_sum(uint8_t *pix, int line_size, int w, int h)
Definition: snowenc.c:165
OPT_OUTPUT
#define OPT_OUTPUT
Definition: cmdutils.h:179
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:204
update_thread_context
Referenced in doc/multithreading.txt: have update_thread_context() run in the next thread and add AV_CODEC_CAP_FRAME_THREADS to the codec capabilities.
CONTEXT
#define CONTEXT
Definition: af_asetrate.c:31
opt_timelimit
int opt_timelimit(void *optctx, const char *opt, const char *arg)
Limit the execution time.
Definition: cmdutils.c:1071
OPT_OFFSET
#define OPT_OFFSET
Definition: cmdutils.h:174
mv_scale
static av_always_inline void mv_scale(Mv *dst, Mv *src, int td, int tb)
Definition: hevc_mvs.c:115
headroom
static int headroom(int *la)
Definition: nellymoser.c:104
plain
static const uint8_t plain[]
Definition: aes_ctr.c:24
opt_max_alloc
int opt_max_alloc(void *optctx, const char *opt, const char *arg)
Definition: cmdutils.c:1057
nb_output_files
int nb_output_files
Definition: ffmpeg.c:156
OptionParseContext::groups
OptionGroupList * groups
Definition: cmdutils.h:336
av_codec_iterate
const AVCodec * av_codec_iterate(void **opaque)
Iterate over all registered codecs.
Definition: allcodecs.c:859
log2
#define log2(x)
Definition: libm.h:404
parse_optgroup
int parse_optgroup(void *optctx, OptionGroup *g)
Parse an options group and write results into optctx.
Definition: cmdutils.c:410
OptionDef::u
union OptionDef::@1 u
parse_loglevel
void parse_loglevel(int argc, char **argv, const OptionDef *options)
Find the '-loglevel' option in the command line args and apply it.
Definition: cmdutils.c:502
AVInputFormat::get_device_list
int(* get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list)
Returns device list with its properties.
Definition: avformat.h:772
AVFilter
Filter definition.
Definition: avfilter.h:145
version.h
OptionGroup::sws_dict
AVDictionary * sws_dict
Definition: cmdutils.h:318
directory
FFmpeg currently uses a custom build this text attempts to document some of its obscure features and options Makefile the full command issued by make and its output will be shown on the screen DBG Preprocess x86 external assembler files to a dbg asm file in the object directory
Definition: build_system.txt:12
language
Undefined Behavior: in the C language
Definition: undefined.txt:3
SpecifierOpt
Definition: cmdutils.h:146
OptionGroup::resample_opts
AVDictionary * resample_opts
Definition: cmdutils.h:317
array
static int array[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:106
G
#define G
Definition: huffyuvdsp.h:33
files
(fragment of doc/tablegen.txt — preliminary notes on writing a table generator, which consists of two files)
Definition: tablegen.txt:8
ret
ret
Definition: filter_design.txt:187
AVStream
Stream structure.
Definition: avformat.h:873
AV_LOG_FATAL
#define AV_LOG_FATAL
Something went wrong and recovery is not possible.
Definition: log.h:188
pixfmt
enum AVPixelFormat pixfmt
Definition: kmsgrab.c:365
__asm__
__asm__(".macro parse_r var r\n\t" "\\var = -1\n\t" _IFC_REG(0) _IFC_REG(1) _IFC_REG(2) _IFC_REG(3) _IFC_REG(4) _IFC_REG(5) _IFC_REG(6) _IFC_REG(7) _IFC_REG(8) _IFC_REG(9) _IFC_REG(10) _IFC_REG(11) _IFC_REG(12) _IFC_REG(13) _IFC_REG(14) _IFC_REG(15) _IFC_REG(16) _IFC_REG(17) _IFC_REG(18) _IFC_REG(19) _IFC_REG(20) _IFC_REG(21) _IFC_REG(22) _IFC_REG(23) _IFC_REG(24) _IFC_REG(25) _IFC_REG(26) _IFC_REG(27) _IFC_REG(28) _IFC_REG(29) _IFC_REG(30) _IFC_REG(31) ".iflt \\var\n\t" ".error \"Unable to parse register name \\r\"\n\t" ".endif\n\t" ".endm")
pred
static const float pred[4]
Definition: siprdata.h:259
currently
(fragment of doc/swscale.txt — libswscale design notes on the input-to-YUV converters and the ring buffer)
Definition: swscale.txt:54
read_yesno
int read_yesno(void)
Return a positive value if a line read from standard input starts with [yY], otherwise return 0.
Definition: cmdutils.c:2034
links
(fragment of doc/filter_design.txt — format negotiation on input and output links)
Definition: filter_design.txt:14
av_strtod
double av_strtod(const char *numstr, char **tail)
Parse the string in numstr and return its value as a double.
Definition: eval.c:106
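A hedged usage sketch (editor's addition; av_strtod() also accepts SI postfixes such as "K" and "M", and tail receives a pointer to the first unparsed character):

#include <stdio.h>
#include "libavutil/eval.h"

int main(void)
{
    char *tail;
    double v = av_strtod("1.5M", &tail);     /* the "M" postfix should scale by 1e6 */
    printf("value=%g, rest=\"%s\"\n", v, tail);
    return 0;
}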
frame
(fragment of doc/filter_design.txt — libavfilter design notes on frame queuing and request_frame)
Definition: filter_design.txt:264
comment
static int FUNC() comment(CodedBitstreamContext *ctx, RWContext *rw, JPEGRawComment *current)
Definition: cbs_jpeg_syntax_template.c:174
av_strlcat
size_t av_strlcat(char *dst, const char *src, size_t size)
Append the string src to the string dst, but to a total length of no more than size - 1 bytes,...
Definition: avstring.c:93
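A small sketch combining this with av_strlcpy() listed further below (editor's addition; both functions truncate to size - 1 bytes and always NUL-terminate the destination when size is non-zero):

#include <stdio.h>
#include "libavutil/avstring.h"

int main(void)
{
    char buf[16];
    av_strlcpy(buf, "format=", sizeof(buf));
    av_strlcat(buf, "yuv420p", sizeof(buf));   /* silently truncated if it does not fit */
    printf("%s\n", buf);                       /* prints "format=yuv420p" */
    return 0;
}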
OptionGroup::arg
const char * arg
Definition: cmdutils.h:310
AVDeviceInfoList
List of devices.
Definition: avdevice.h:465
avcodec_find_decoder
AVCodec * avcodec_find_decoder(enum AVCodecID id)
Find a registered decoder with a matching codec ID.
Definition: allcodecs.c:946
uninit_parse_context
void uninit_parse_context(OptionParseContext *octx)
Free all allocated memory in an OptionParseContext.
Definition: cmdutils.c:723
log_callback_help
void log_callback_help(void *ptr, int level, const char *fmt, va_list vl)
Trivial log callback.
Definition: cmdutils.c:96
OPT_PERFILE
#define OPT_PERFILE
Definition: cmdutils.h:173
av_opt_get_key_value
int av_opt_get_key_value(const char **ropts, const char *key_val_sep, const char *pairs_sep, unsigned flags, char **rkey, char **rval)
Extract a key-value pair from the beginning of a string.
Definition: opt.c:1537
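A hedged sketch of walking such a key/value string (editor's addition; the option string, the separators, and the helper name dump_pairs are illustrative; the caller frees the returned strings and skips the pair separator itself):

#include <stdio.h>
#include "libavutil/opt.h"
#include "libavutil/mem.h"

static void dump_pairs(const char *opts)
{
    while (*opts) {
        char *key, *val;
        if (av_opt_get_key_value(&opts, "=", ":", 0, &key, &val) < 0)
            break;
        printf("%s -> %s\n", key, val);
        av_freep(&key);
        av_freep(&val);
        if (*opts == ':')
            opts++;            /* assumption: the pair separator is skipped by the caller */
    }
}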
avformat.h
av_stream_get_side_data
uint8_t * av_stream_get_side_data(const AVStream *stream, enum AVPacketSideDataType type, size_t *size)
Get side information from stream.
av_bprintf
void av_bprintf(AVBPrint *buf, const char *fmt,...)
Definition: bprint.c:94
dict.h
ff_thread_finish_setup
(fragment of the frame-threading documentation on when to call ff_thread_finish_setup())
av_get_channel_description
const char * av_get_channel_description(uint64_t channel)
Get the description of a given channel.
Definition: channel_layout.c:260
AV_LOG_SKIP_REPEATED
#define AV_LOG_SKIP_REPEATED
Skip repeated messages, this requires the user app to use av_log() instead of (f)printf as the 2 woul...
Definition: log.h:384
left
(fragment of doc/snow.txt — Snow codec specification)
Definition: snow.txt:386
add_opt
static void add_opt(OptionParseContext *octx, const OptionDef *opt, const char *key, const char *val)
Definition: cmdutils.c:689
show_codecs
int show_codecs(void *optctx, const char *opt, const char *arg)
Print a listing containing all the codecs supported by the program.
Definition: cmdutils.c:1550
init_report
static int init_report(const char *env)
Definition: cmdutils.c:974
avfilter_pad_get_type
enum AVMediaType avfilter_pad_get_type(const AVFilterPad *pads, int pad_idx)
Get the type of an AVFilterPad.
Definition: avfilter.c:1055
L
#define L(x)
Definition: vp56_arith.h:36
AVCodecContext
main external API structure.
Definition: avcodec.h:536
compare
static float compare(const AVFrame *haystack, const AVFrame *obj, int offx, int offy)
Definition: vf_find_rect.c:104
av_muxer_iterate
const AVOutputFormat * av_muxer_iterate(void **opaque)
Iterate over all registered muxers.
Definition: allformats.c:541
parse_option
int parse_option(void *optctx, const char *opt, const char *arg, const OptionDef *options)
Parse one given option.
Definition: cmdutils.c:346
get_rotation
double get_rotation(AVStream *st)
Definition: cmdutils.c:2200
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Non-inlined equivalent of av_mallocz_array().
Definition: mem.c:245
AVDeviceInfoList::default_device
int default_device
index of default device or -1 if no default
Definition: avdevice.h:468
av_opt_child_class_iterate
const FF_ENABLE_DEPRECATION_WARNINGS AVClass * av_opt_child_class_iterate(const AVClass *parent, void **iter)
Iterate over potential AVOptions-enabled children of parent.
Definition: opt.c:1731
opt_cpuflags
int opt_cpuflags(void *optctx, const char *opt, const char *arg)
Override the cpuflags.
Definition: cmdutils.c:855
again
(fragment of doc/filter_design.txt — libavfilter design notes on format negotiation)
Definition: filter_design.txt:25
if
if(!keyframe)
Definition: snow.txt:61
sws_freeContext
void sws_freeContext(struct SwsContext *swsContext)
Free the swscaler context swsContext.
Definition: utils.c:2337
AVBitStreamFilter
Definition: bsf.h:98
buffer
(fragment of doc/filter_design.txt — libavfilter design notes on the frame reference mechanism)
Definition: filter_design.txt:49
SHOW_DEMUXERS
@ SHOW_DEMUXERS
Definition: cmdutils.c:78
get_buffer
(fragment of the frame-threading documentation on buffer access restrictions around get_buffer())
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
Compensation
Motion Compensation
Definition: snow.txt:418
Transform
Definition: deshake.h:47
headers
(fragment of doc/build_system.txt — notes on the FFmpeg build system)
Definition: build_system.txt:34
avfilter.h
av_match_name
int av_match_name(const char *name, const char *names)
Match instances of a name in a comma-separated list of names.
Definition: avstring.c:353
video
A Quick Description Of Rate Distortion Theory We want to encode a video
Definition: rate_distortion.txt:3
test
static void test(const char *pattern, const char *host)
Definition: noproxy.c:23
av_dict_parse_string
int av_dict_parse_string(AVDictionary **pm, const char *str, const char *key_val_sep, const char *pairs_sep, int flags)
Parse the key/value pairs list and add the parsed entries to a dictionary.
Definition: dict.c:180
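A minimal sketch (editor's addition; the option string, separators, and helper name make_opts are illustrative):

#include "libavutil/dict.h"

static AVDictionary *make_opts(void)
{
    AVDictionary *opts = NULL;
    /* '=' separates key from value, ':' separates pairs */
    if (av_dict_parse_string(&opts, "preset=slow:crf=20", "=", ":", 0) < 0) {
        av_dict_free(&opts);
        return NULL;
    }
    return opts;
}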
values
(fragment of doc/filter_design.txt — libavfilter design notes on frame queuing and request_frame)
Definition: filter_design.txt:263
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:107
AVOutputFormat::video_codec
enum AVCodecID video_codec
default video codec
Definition: avformat.h:502
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:77
samples
Filter the word “frame” indicates either a video frame or a group of audio samples
Definition: filter_design.txt:8
mean
static float mean(const float *input, int size)
Definition: vf_nnedi.c:864
Option::val
const char * val
Definition: cmdutils.h:290
note
(fragment of doc/snow.txt — Snow codec specification)
Definition: snow.txt:555
GROW_ARRAY
#define GROW_ARRAY(array, nb_elems)
Definition: cmdutils.h:619
IDCT
#define IDCT(H)
Definition: hevcdsp_template.c:240
avcodec_get_hw_config
const AVCodecHWConfig * avcodec_get_hw_config(const AVCodec *codec, int index)
Retrieve supported hardware configurations for a codec.
Definition: utils.c:904
ff_outlink_get_status
int ff_outlink_get_status(AVFilterLink *link)
Get the status on an output link.
Definition: avfilter.c:1643
AVFilterContext
An instance of a filter.
Definition: avfilter.h:341
need
(fragment of doc/writing_filters.txt: FoobarContext is the filter's state context, zero-initialized on creation, where all global information the filter needs is kept)
Definition: writing_filters.txt:75
factor
static const int factor[16]
Definition: vf_pp7.c:77
AV_CODEC_CAP_PARAM_CHANGE
#define AV_CODEC_CAP_PARAM_CHANGE
Codec supports changed parameters at any point.
Definition: codec.h:116
timeline
Definition: dashdec.c:46
https
(fragment of doc/writing_filters.txt — tutorial on writing a new libavfilter filter)
Definition: writing_filters.txt:66
bad
static int bad(InterplayACMContext *s, unsigned ind, unsigned col)
Definition: interplayacm.c:116
print_program_info
static void print_program_info(int flags, int level)
Definition: cmdutils.c:1143
shift
static int shift(int a, int b)
Definition: sonic.c:82
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:117
opt_default
int opt_default(void *optctx, const char *opt, const char *arg)
Fallback for options that are not explicitly handled, these will be parsed through AVOptions.
Definition: cmdutils.c:541
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:253
desc
const char * desc
Definition: libsvtav1.c:79
review
(fragment of doc/writing_filters.txt — tutorial on writing a new libavfilter filter)
Definition: writing_filters.txt:8
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
av_log_default_callback
void av_log_default_callback(void *ptr, int level, const char *fmt, va_list vl)
Default logging callback.
Definition: log.c:346
add
static float add(float src0, float src1)
Definition: dnn_backend_native_layer_mathbinary.c:36
AV_CODEC_CAP_SUBFRAMES
#define AV_CODEC_CAP_SUBFRAMES
Codec can output multiple frames per AVPacket. Normally demuxers return one frame at a time,...
Definition: codec.h:95
AV_OPT_FLAG_SUBTITLE_PARAM
#define AV_OPT_FLAG_SUBTITLE_PARAM
Definition: opt.h:282
overflow
Undefined Behavior: in the C language, some operations are undefined, like signed integer overflow
Definition: undefined.txt:3
show_colors
int show_colors(void *optctx, const char *opt, const char *arg)
Print a listing containing all the color names and values recognized by the program.
Definition: cmdutils.c:1731
codec_ids
static enum AVCodecID codec_ids[]
Definition: aac_adtstoasc_bsf.c:148
FF_CODEC_CAP_ALLOCATE_PROGRESS
#define FF_CODEC_CAP_ALLOCATE_PROGRESS
Definition: internal.h:76
L3
(fragment of doc/snow.txt — Snow codec specification)
Definition: snow.txt:554
it
(fragment of doc/writing_filters.txt — tutorial on writing a new libavfilter filter)
Definition: writing_filters.txt:31
AV_CODEC_CAP_HYBRID
#define AV_CODEC_CAP_HYBRID
Codec is potentially backed by a hardware implementation, but not necessarily.
Definition: codec.h:164
av_get_known_color_name
const char * av_get_known_color_name(int color_idx, const uint8_t **rgbp)
Get the name of a color from the internal table of hard-coded named colors.
Definition: parseutils.c:434
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:81
OptionDef::name
const char * name
Definition: cmdutils.h:159
show_filters
int show_filters(void *optctx, const char *opt, const char *arg)
Print a listing containing all the filters supported by the program.
Definition: cmdutils.c:1684
show_encoders
int show_encoders(void *optctx, const char *opt, const char *arg)
Print a listing containing all the encoders supported by the program.
Definition: cmdutils.c:1651
functions
static const struct drawtext_function functions[]
av_free
#define av_free(p)
Definition: tableprint_vlc.h:34
AVDictionaryEntry
Definition: dict.h:81
opt_loglevel
int opt_loglevel(void *optctx, const char *opt, const char *arg)
Set the libav* libraries log level.
Definition: cmdutils.c:867
show_help_protocol
static void show_help_protocol(const char *name)
Definition: cmdutils.c:1870
FF_FILTER_FORWARD_STATUS
FF_FILTER_FORWARD_STATUS(inlink, outlink)
equal
static int equal(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:138
cr
static double cr(void *priv, double x, double y)
Definition: vf_geq.c:216
OptionGroupDef::sep
const char * sep
Option to be used as group separator.
Definition: cmdutils.h:300
channel_layouts
static const uint16_t channel_layouts[7]
Definition: dca_lbr.c:114
status_in
(fragment of doc/filter_design.txt on the status_in and status_out fields of filter links)
Definition: filter_design.txt:154
AVFILTER_FLAG_SUPPORT_TIMELINE
#define AVFILTER_FLAG_SUPPORT_TIMELINE
Handy mask to test whether the filter supports the timeline feature or not (internally or generically)...
Definition: avfilter.h:139
avoid
the frame and frame reference mechanism is intended to avoid
Definition: filter_design.txt:45
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
cmdutils.h
ready
(fragment of doc/filter_design.txt — libavfilter design notes on frame queuing and request_frame)
Definition: filter_design.txt:258
htaps
static const double htaps[HTAPS]
The 2nd half (48 coeffs) of a 96-tap symmetric lowpass filter.
Definition: dsd_tablegen.h:55
OPT_BOOL
#define OPT_BOOL
Definition: cmdutils.h:162
once
static pthread_once_t once
Definition: ffjni.c:36
inverse
static uint32_t inverse(uint32_t v)
find multiplicative inverse modulo 2 ^ 32
Definition: asfcrypt.c:35
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: aeval.c:243
convert_header.str
string str
Definition: convert_header.py:20
parse_time_or_die
int64_t parse_time_or_die(const char *context, const char *timestr, int is_duration)
Parse a string specifying a time and return its corresponding value as a number of microseconds.
Definition: cmdutils.c:162
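A hedged usage sketch (editor's addition; the option name and time string are illustrative; with is_duration set, the result is a duration in microseconds):

#include <inttypes.h>
#include <stdio.h>
#include "cmdutils.h"

static void show_duration(void)
{
    int64_t t = parse_time_or_die("-t", "00:01:30", 1);   /* 90 s -> 90000000 us */
    printf("duration: %"PRId64" microseconds\n", t);
}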
grow_array
void * grow_array(void *array, int elem_size, int *size, int new_size)
Realloc array to hold new_size elements of elem_size.
Definition: cmdutils.c:2181
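A sketch of the usual pattern built on top of this via the GROW_ARRAY() macro listed above (editor's addition; the variable and helper names are illustrative, OutputFile is the ffmpeg.h type listed below, and newly added elements are assumed to be zero-initialized by grow_array()):

#include "cmdutils.h"
#include "ffmpeg.h"

static OutputFile **files;
static int          nb_files;

static void add_file(OutputFile *of)
{
    GROW_ARRAY(files, nb_files);        /* reallocates the array and bumps the counter */
    files[nb_files - 1] = of;
}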
imgutils.h
OutputStream
Definition: muxing.c:53
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
rgb
static const SheerTable rgb[2]
Definition: sheervideodata.h:32
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
av_strlcpy
size_t av_strlcpy(char *dst, const char *src, size_t size)
Copy the string src to dst, but no more than size - 1 bytes, and null-terminate dst.
Definition: avstring.c:83
coeff
static const double coeff[2][5]
Definition: vf_owdenoise.c:73
block
The exact code depends on how similar the blocks are and how related they are to the block
Definition: filter_design.txt:207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
OptionParseContext::nb_groups
int nb_groups
Definition: cmdutils.h:337
AV_CODEC_CAP_AVOID_PROBING
#define AV_CODEC_CAP_AVOID_PROBING
Decoder is not a preferred choice for probing.
Definition: codec.h:139
pix_norm1
static int pix_norm1(uint8_t *pix, int line_size, int w)
Definition: snowenc.c:181
find_option
static const OptionDef * find_option(const OptionDef *po, const char *name)
Definition: cmdutils.c:216
AVCodecHWConfig
Definition: codec.h:443
uninit
static av_cold int uninit(AVCodecContext *avctx)
Definition: crystalhd.c:279
h
h
Definition: vp9dsp_template.c:2038
ff_outlink_frame_wanted
(fragment of doc/filter_design.txt: ff_outlink_frame_wanted() tests whether a frame is wanted on an output link; if it returns true the filter is expected to work towards producing one)
avcodec_descriptor_get
const AVCodecDescriptor * avcodec_descriptor_get(enum AVCodecID id)
Definition: codec_desc.c:3501
Sequence
@ Sequence
Definition: mxf.h:37
AVDictionaryEntry::value
char * value
Definition: dict.h:83
avstring.h
dimension
(fragment of doc/swscale.txt — libswscale design notes on slice geometry)
project
static float project(float origin_x, float origin_y, float dest_x, float dest_y, int point_x, int point_y)
Definition: vsrc_gradients.c:156
show_help_options
void show_help_options(const OptionDef *options, const char *msg, int req_flags, int rej_flags, int alt_flags)
Print help for all options matching specified flags.
Definition: cmdutils.c:174
show_bsfs
int show_bsfs(void *optctx, const char *opt, const char *arg)
Print a listing containing all the bit stream filters supported by the program.
Definition: cmdutils.c:1657
Otherwise
(fragment of doc/writing_filters.txt — tutorial on writing a new libavfilter filter)
Definition: writing_filters.txt:6
show_license
int show_license(void *optctx, const char *opt, const char *arg)
Print the license of the program to stdout.
Definition: cmdutils.c:1211
PRINT_LIB_INFO
#define PRINT_LIB_INFO(libname, LIBNAME, flags, level)
Definition: cmdutils.c:1101
monolithic
This document is a tutorial initiation for writing simple filters in libavfilter libavfilter is monolithic
Definition: writing_filters.txt:4
avcodec_descriptor_get_by_name
const AVCodecDescriptor * avcodec_descriptor_get_by_name(const char *name)
Definition: codec_desc.c:3516
planar
(fragment of libswresample/audioconvert.c — sample format conversion tables and the swri_audio_convert() implementation)
Definition: audioconvert.c:56
AV_CODEC_CAP_SMALL_LAST_FRAME
#define AV_CODEC_CAP_SMALL_LAST_FRAME
Codec can be fed a final frame with a smaller size.
Definition: codec.h:82
SwsContext
Definition: swscale_internal.h:283
av_opt_show2
int av_opt_show2(void *obj, void *av_log_obj, int req_flags, int rej_flags)
Show the obj options.
Definition: opt.c:1346
show_help_muxer
static void show_help_muxer(const char *name)
Definition: cmdutils.c:1888
AV_PIX_FMT_FLAG_PAL
#define AV_PIX_FMT_FLAG_PAL
Pixel format has a palette in data[1], values are indexes in this palette.
Definition: pixdesc.h:132
MpegEncContext
MpegEncContext.
Definition: mpegvideo.h:81
snprintf
#define snprintf
Definition: snprintf.h:34
filter
(fragment of doc/snow.txt — Snow codec specification)
Definition: snow.txt:562
sanitize
static void sanitize(uint8_t *line)
Definition: log.c:245
do
(fragment of doc/writing_filters.txt on declaring the filter's option table)
Definition: writing_filters.txt:90
OptionParseContext::cur_group
OptionGroup cur_group
Definition: cmdutils.h:340
passed
static int passed(HysteresisContext *s, int x, int y, int w)
Definition: vf_hysteresis.c:140
AV_LOG_PRINT_LEVEL
#define AV_LOG_PRINT_LEVEL
Include the log severity in messages originating from codecs.
Definition: log.h:392
AVCodecHWConfig::device_type
enum AVHWDeviceType device_type
The device type associated with the configuration.
Definition: codec.h:464
get_video_buffer
static AVFrame * get_video_buffer(AVFilterLink *inlink, int w, int h)
Definition: avf_concat.c:199
Therefore
(fragment of doc/filter_design.txt — libavfilter design notes on frame queuing and request_frame)
Definition: filter_design.txt:244
undefined
Undefined Behavior: in the C language, some operations are undefined
Definition: undefined.txt:3
is_device
static int is_device(const AVClass *avclass)
Definition: cmdutils.c:1285
av_bprint_chars
void av_bprint_chars(AVBPrint *buf, char c, unsigned n)
Append char c n times to a print buffer.
Definition: bprint.c:140
show_demuxers
int show_demuxers(void *optctx, const char *opt, const char *arg)
Print a listing containing all the demuxers supported by the program (including devices).
Definition: cmdutils.c:1365
channel
channel
Definition: ebur128.h:39
swscale.h
request_frame
static int request_frame(AVFilterLink *outlink)
Definition: aeval.c:273
match_group_separator
static int match_group_separator(const OptionGroupDef *groups, int nb_groups, const char *opt)
Definition: cmdutils.c:638
AVInputFormat::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: avformat.h:670
foobar
(fragment of doc/writing_filters.txt — tutorial on writing a new libavfilter filter)
Definition: writing_filters.txt:84
OptionDef::func_arg
int(* func_arg)(void *, const char *, const char *)
Definition: cmdutils.h:182
opt_find
static const AVOption * opt_find(void *obj, const char *name, const char *unit, int opt_flags, int search_flags)
Definition: cmdutils.c:531
av_x_if_null
static void * av_x_if_null(const void *p, const void *x)
Return the default pointer x in case p is NULL.
Definition: avutil.h:308
nb_output_streams
int nb_output_streams
Definition: ffmpeg.c:154
av_display_rotation_get
double av_display_rotation_get(const int32_t matrix[9])
Extract the rotation component of the transformation matrix.
Definition: display.c:34
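A hedged sketch combining this with av_stream_get_side_data() listed above (editor's addition; the helper name is illustrative, and the sign flip and lack of angle normalization are simplifications compared to cmdutils' get_rotation()):

#include "libavformat/avformat.h"
#include "libavutil/display.h"

static double stream_rotation(AVStream *st)
{
    uint8_t *sd = av_stream_get_side_data(st, AV_PKT_DATA_DISPLAYMATRIX, NULL);
    /* no display-matrix side data means no rotation metadata */
    return sd ? -av_display_rotation_get((int32_t *)sd) : 0.0;
}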
OutputFile
Definition: ffmpeg.h:575
re
float re
Definition: fft.c:82
GEN
#define GEN(table)
program_birth_year
const int program_birth_year
program birth year, defined by the program for show_banner()
Definition: ffmpeg.c:110
min
float min
Definition: vorbis_enc_data.h:456
OptionDef::flags
int flags
Definition: cmdutils.h:160
OPT_DOUBLE
#define OPT_DOUBLE
Definition: cmdutils.h:177
dither
static const uint8_t dither[8][8]
Definition: vf_fspp.c:59