FFmpeg
avf_showwaves.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2012 Stefano Sabatini
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * audio to video multimedia filter
24  */
25 
26 #include "libavutil/avassert.h"
27 #include "libavutil/avstring.h"
29 #include "libavutil/opt.h"
30 #include "libavutil/parseutils.h"
31 #include "avfilter.h"
32 #include "filters.h"
33 #include "formats.h"
34 #include "audio.h"
35 #include "video.h"
36 #include "internal.h"
37 
44 };
45 
52 };
53 
58 };
59 
60 struct frame_node {
62  struct frame_node *next;
63 };
64 
65 typedef struct ShowWavesContext {
66  const AVClass *class;
67  int w, h;
69  char *colors;
70  int buf_idx;
71  int16_t *buf_idy; /* y coordinate of previous sample for each channel */
73  int n;
74  int pixstep;
76  int mode; ///< ShowWavesMode
77  int scale; ///< ShowWavesScale
78  int draw_mode; ///< ShowWavesDrawMode
81 
82  int (*get_h)(int16_t sample, int height);
83  void (*draw_sample)(uint8_t *buf, int height, int linesize,
84  int16_t *prev_y, const uint8_t color[4], int h);
85 
86  /* single picture */
90  int64_t total_samples;
91  int64_t *sum; /* abs sum of the samples per channel */
93 
94 #define OFFSET(x) offsetof(ShowWavesContext, x)
95 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
96 
97 static const AVOption showwaves_options[] = {
98  { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
99  { "s", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
100  { "mode", "select display mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=MODE_POINT}, 0, MODE_NB-1, FLAGS, "mode"},
101  { "point", "draw a point for each sample", 0, AV_OPT_TYPE_CONST, {.i64=MODE_POINT}, .flags=FLAGS, .unit="mode"},
102  { "line", "draw a line for each sample", 0, AV_OPT_TYPE_CONST, {.i64=MODE_LINE}, .flags=FLAGS, .unit="mode"},
103  { "p2p", "draw a line between samples", 0, AV_OPT_TYPE_CONST, {.i64=MODE_P2P}, .flags=FLAGS, .unit="mode"},
104  { "cline", "draw a centered line for each sample", 0, AV_OPT_TYPE_CONST, {.i64=MODE_CENTERED_LINE}, .flags=FLAGS, .unit="mode"},
105  { "n", "set how many samples to show in the same point", OFFSET(n), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, FLAGS },
106  { "rate", "set video rate", OFFSET(rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },
107  { "r", "set video rate", OFFSET(rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },
108  { "split_channels", "draw channels separately", OFFSET(split_channels), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
109  { "colors", "set channels colors", OFFSET(colors), AV_OPT_TYPE_STRING, {.str = "red|green|blue|yellow|orange|lime|pink|magenta|brown" }, 0, 0, FLAGS },
110  { "scale", "set amplitude scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, SCALE_NB-1, FLAGS, .unit="scale" },
111  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_LIN}, .flags=FLAGS, .unit="scale"},
112  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_LOG}, .flags=FLAGS, .unit="scale"},
113  { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_SQRT}, .flags=FLAGS, .unit="scale"},
114  { "cbrt", "cubic root", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_CBRT}, .flags=FLAGS, .unit="scale"},
115  { "draw", "set draw mode", OFFSET(draw_mode), AV_OPT_TYPE_INT, {.i64 = DRAW_SCALE}, 0, DRAW_NB-1, FLAGS, .unit="draw" },
116  { "scale", "scale pixel values for each drawn sample", 0, AV_OPT_TYPE_CONST, {.i64=DRAW_SCALE}, .flags=FLAGS, .unit="draw"},
117  { "full", "draw every pixel for sample directly", 0, AV_OPT_TYPE_CONST, {.i64=DRAW_FULL}, .flags=FLAGS, .unit="draw"},
118  { NULL }
119 };
120 
121 AVFILTER_DEFINE_CLASS(showwaves);
122 
{
    /* NOTE(review): the function signature (upstream line 123, the filter's
     * uninit callback taking AVFilterContext *ctx) is elided in this dump. */
    ShowWavesContext *showwaves = ctx->priv;

    /* release drawing state */
    av_frame_free(&showwaves->outpicref);
    av_freep(&showwaves->buf_idy);
    av_freep(&showwaves->fg);

    if (showwaves->single_pic) {
        /* showwavespic queues every input frame until EOF; free the list */
        struct frame_node *node = showwaves->audio_frames;
        while (node) {
            struct frame_node *tmp = node;

            node = node->next;
            av_frame_free(&tmp->frame);
            av_freep(&tmp);
        }
        av_freep(&showwaves->sum);
        showwaves->last_frame = NULL;
    }
}
144 
{
    /* NOTE(review): several lines are elided in this dump — the function
     * signature, the declarations of `formats`/`layouts`, and the calls
     * (ff_make_format_list()/channel-layout/samplerate helpers) that
     * populate them immediately before each *_ref() below. Confirm against
     * the upstream file before relying on this transcription. */
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    int ret;

    /* set input audio formats */
    if ((ret = ff_formats_ref(formats, &inlink->out_formats)) < 0)
        return ret;

    if ((ret = ff_channel_layouts_ref(layouts, &inlink->out_channel_layouts)) < 0)
        return ret;

    if ((ret = ff_formats_ref(formats, &inlink->out_samplerates)) < 0)
        return ret;

    /* set output video format */
    if ((ret = ff_formats_ref(formats, &outlink->in_formats)) < 0)
        return ret;

    return 0;
}
175 
/* Linear mapping: sample -> y coordinate around the vertical midline. */
static int get_lin_h(int16_t sample, int height)
{
    const int mid = height / 2;

    return mid - av_rescale(sample, mid, INT16_MAX);
}
180 
/* Linear mapping of the sample magnitude onto [0, height] (cline variant). */
static int get_lin_h2(int16_t sample, int height)
{
    const int mag = FFABS(sample);

    return av_rescale(mag, height, INT16_MAX);
}
185 
/* Logarithmic mapping around the vertical midline. */
static int get_log_h(int16_t sample, int height)
{
    const int mid = height / 2;
    const double scaled = log10(1 + FFABS(sample)) * mid / log10(1 + INT16_MAX);

    return mid - FFSIGN(sample) * scaled;
}
190 
/* Logarithmic mapping of the sample magnitude onto [0, height]. */
static int get_log_h2(int16_t sample, int height)
{
    const double num = log10(1 + FFABS(sample)) * height;

    return num / log10(1 + INT16_MAX);
}
195 
/* Square-root mapping around the vertical midline. */
static int get_sqrt_h(int16_t sample, int height)
{
    const int mid = height / 2;
    const double scaled = sqrt(FFABS(sample)) * mid / sqrt(INT16_MAX);

    return mid - FFSIGN(sample) * scaled;
}
200 
/* Square-root mapping of the sample magnitude onto [0, height]. */
static int get_sqrt_h2(int16_t sample, int height)
{
    const double num = sqrt(FFABS(sample)) * height;

    return num / sqrt(INT16_MAX);
}
205 
/* Cube-root mapping around the vertical midline. */
static int get_cbrt_h(int16_t sample, int height)
{
    const int mid = height / 2;
    const double scaled = cbrt(FFABS(sample)) * mid / cbrt(INT16_MAX);

    return mid - FFSIGN(sample) * scaled;
}
210 
/* Cube-root mapping of the sample magnitude onto [0, height]. */
static int get_cbrt_h2(int16_t sample, int height)
{
    const double num = cbrt(FFABS(sample)) * height;

    return num / cbrt(INT16_MAX);
}
215 
/* Accumulate one RGBA point at row h; rows outside [0, height) are ignored.
 * Components are added (scale draw mode), so overlapping samples blend. */
static void draw_sample_point_rgba_scale(uint8_t *buf, int height, int linesize,
                                         int16_t *prev_y,
                                         const uint8_t color[4], int h)
{
    uint8_t *px;
    int i;

    if (h < 0 || h >= height)
        return;
    px = buf + h * linesize;
    for (i = 0; i < 4; i++)
        px[i] += color[i];
}
227 
/* Write one RGBA point at row h; rows outside [0, height) are ignored.
 * Components are assigned (full draw mode), so later samples overwrite. */
static void draw_sample_point_rgba_full(uint8_t *buf, int height, int linesize,
                                        int16_t *prev_y,
                                        const uint8_t color[4], int h)
{
    uint8_t *px;
    int i;

    if (h < 0 || h >= height)
        return;
    px = buf + h * linesize;
    for (i = 0; i < 4; i++)
        px[i] = color[i];
}
239 
240 static void draw_sample_line_rgba_scale(uint8_t *buf, int height, int linesize,
241  int16_t *prev_y,
242  const uint8_t color[4], int h)
243 {
244  int k;
245  int start = height/2;
246  int end = av_clip(h, 0, height-1);
247  if (start > end)
248  FFSWAP(int16_t, start, end);
249  for (k = start; k < end; k++) {
250  buf[k * linesize + 0] += color[0];
251  buf[k * linesize + 1] += color[1];
252  buf[k * linesize + 2] += color[2];
253  buf[k * linesize + 3] += color[3];
254  }
255 }
256 
257 static void draw_sample_line_rgba_full(uint8_t *buf, int height, int linesize,
258  int16_t *prev_y,
259  const uint8_t color[4], int h)
260 {
261  int k;
262  int start = height/2;
263  int end = av_clip(h, 0, height-1);
264  if (start > end)
265  FFSWAP(int16_t, start, end);
266  for (k = start; k < end; k++) {
267  buf[k * linesize + 0] = color[0];
268  buf[k * linesize + 1] = color[1];
269  buf[k * linesize + 2] = color[2];
270  buf[k * linesize + 3] = color[3];
271  }
272 }
273 
274 static void draw_sample_p2p_rgba_scale(uint8_t *buf, int height, int linesize,
275  int16_t *prev_y,
276  const uint8_t color[4], int h)
277 {
278  int k;
279  if (h >= 0 && h < height) {
280  buf[h * linesize + 0] += color[0];
281  buf[h * linesize + 1] += color[1];
282  buf[h * linesize + 2] += color[2];
283  buf[h * linesize + 3] += color[3];
284  if (*prev_y && h != *prev_y) {
285  int start = *prev_y;
286  int end = av_clip(h, 0, height-1);
287  if (start > end)
288  FFSWAP(int16_t, start, end);
289  for (k = start + 1; k < end; k++) {
290  buf[k * linesize + 0] += color[0];
291  buf[k * linesize + 1] += color[1];
292  buf[k * linesize + 2] += color[2];
293  buf[k * linesize + 3] += color[3];
294  }
295  }
296  }
297  *prev_y = h;
298 }
299 
300 static void draw_sample_p2p_rgba_full(uint8_t *buf, int height, int linesize,
301  int16_t *prev_y,
302  const uint8_t color[4], int h)
303 {
304  int k;
305  if (h >= 0 && h < height) {
306  buf[h * linesize + 0] = color[0];
307  buf[h * linesize + 1] = color[1];
308  buf[h * linesize + 2] = color[2];
309  buf[h * linesize + 3] = color[3];
310  if (*prev_y && h != *prev_y) {
311  int start = *prev_y;
312  int end = av_clip(h, 0, height-1);
313  if (start > end)
314  FFSWAP(int16_t, start, end);
315  for (k = start + 1; k < end; k++) {
316  buf[k * linesize + 0] = color[0];
317  buf[k * linesize + 1] = color[1];
318  buf[k * linesize + 2] = color[2];
319  buf[k * linesize + 3] = color[3];
320  }
321  }
322  }
323  *prev_y = h;
324 }
325 
/* Accumulate a vertically centered RGBA line of length h (scale draw mode).
 * Fix: clamp h to [0, height]. The scaled height handed in by the get_h
 * functions can fall outside that range (e.g. rounding at full amplitude),
 * and the previous code then computed a negative `start`, writing out of
 * bounds before buf (undefined behavior). In-range inputs are unaffected. */
static void draw_sample_cline_rgba_scale(uint8_t *buf, int height, int linesize,
                                         int16_t *prev_y,
                                         const uint8_t color[4], int h)
{
    int k, i;
    int start, end;

    if (h < 0)
        h = 0;
    else if (h > height)
        h = height;
    start = (height - h) / 2;
    end   = start + h;
    for (k = start; k < end; k++)
        for (i = 0; i < 4; i++)
            buf[k * linesize + i] += color[i];
}
/* Write a vertically centered RGBA line of length h (full draw mode).
 * Fix: clamp h to [0, height]. Out-of-range h previously made `start`
 * negative and wrote before buf (undefined behavior); in-range inputs
 * behave exactly as before. */
static void draw_sample_cline_rgba_full(uint8_t *buf, int height, int linesize,
                                        int16_t *prev_y,
                                        const uint8_t color[4], int h)
{
    int k, i;
    int start, end;

    if (h < 0)
        h = 0;
    else if (h > height)
        h = height;
    start = (height - h) / 2;
    end   = start + h;
    for (k = start; k < end; k++)
        for (i = 0; i < 4; i++)
            buf[k * linesize + i] = color[i];
}
354 
/* Accumulate one gray point at row h; rows outside [0, height) are ignored. */
static void draw_sample_point_gray(uint8_t *buf, int height, int linesize,
                                   int16_t *prev_y,
                                   const uint8_t color[4], int h)
{
    if (h < 0 || h >= height)
        return;
    buf[h * linesize] += color[0];
}
362 
363 static void draw_sample_line_gray(uint8_t *buf, int height, int linesize,
364  int16_t *prev_y,
365  const uint8_t color[4], int h)
366 {
367  int k;
368  int start = height/2;
369  int end = av_clip(h, 0, height-1);
370  if (start > end)
371  FFSWAP(int16_t, start, end);
372  for (k = start; k < end; k++)
373  buf[k * linesize] += color[0];
374 }
375 
376 static void draw_sample_p2p_gray(uint8_t *buf, int height, int linesize,
377  int16_t *prev_y,
378  const uint8_t color[4], int h)
379 {
380  int k;
381  if (h >= 0 && h < height) {
382  buf[h * linesize] += color[0];
383  if (*prev_y && h != *prev_y) {
384  int start = *prev_y;
385  int end = av_clip(h, 0, height-1);
386  if (start > end)
387  FFSWAP(int16_t, start, end);
388  for (k = start + 1; k < end; k++)
389  buf[k * linesize] += color[0];
390  }
391  }
392  *prev_y = h;
393 }
394 
/* Accumulate a vertically centered gray line of length h.
 * Fix: clamp h to [0, height]. Out-of-range h previously made `start`
 * negative and wrote before buf (undefined behavior); in-range inputs
 * behave exactly as before. */
static void draw_sample_cline_gray(uint8_t *buf, int height, int linesize,
                                   int16_t *prev_y,
                                   const uint8_t color[4], int h)
{
    int k;
    int start, end;

    if (h < 0)
        h = 0;
    else if (h > height)
        h = height;
    start = (height - h) / 2;
    end   = start + h;
    for (k = start; k < end; k++)
        buf[k * linesize] += color[0];
}
405 
/**
 * Configure the video output link: derive the samples-per-column factor,
 * allocate the per-channel state, select the draw/scale callbacks and
 * pre-compute the per-channel foreground colors.
 */
static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    int nb_channels = inlink->channels;
    char *colors, *saveptr = NULL;
    uint8_t x;
    int ch;

    /* showwavespic folds everything at push_single_pic time, so one
     * sample per column here */
    if (showwaves->single_pic)
        showwaves->n = 1;

    /* default n: as many samples per column as needed to hit the
     * requested frame rate */
    if (!showwaves->n)
        showwaves->n = FFMAX(1, av_rescale_q(inlink->sample_rate, av_make_q(1, showwaves->w), showwaves->rate));

    showwaves->buf_idx = 0;
    /* per-channel previous-y state used by the p2p draw modes */
    if (!(showwaves->buf_idy = av_mallocz_array(nb_channels, sizeof(*showwaves->buf_idy)))) {
        av_log(ctx, AV_LOG_ERROR, "Could not allocate showwaves buffer\n");
        return AVERROR(ENOMEM);
    }
    outlink->w = showwaves->w;
    outlink->h = showwaves->h;
    outlink->sample_aspect_ratio = (AVRational){1,1};

    /* actual output rate: sample_rate / (n * width) frames per second */
    outlink->frame_rate = av_div_q((AVRational){inlink->sample_rate,showwaves->n},
                                   (AVRational){showwaves->w,1});

    av_log(ctx, AV_LOG_VERBOSE, "s:%dx%d r:%f n:%d\n",
           showwaves->w, showwaves->h, av_q2d(outlink->frame_rate), showwaves->n);

    /* pick the draw callback from pixel format x mode (x draw_mode) */
    switch (outlink->format) {
    case AV_PIX_FMT_GRAY8:
        switch (showwaves->mode) {
        case MODE_POINT:         showwaves->draw_sample = draw_sample_point_gray; break;
        case MODE_LINE:          showwaves->draw_sample = draw_sample_line_gray;  break;
        case MODE_P2P:           showwaves->draw_sample = draw_sample_p2p_gray;   break;
        case MODE_CENTERED_LINE: showwaves->draw_sample = draw_sample_cline_gray; break;
        default:
            return AVERROR_BUG;
        }
        showwaves->pixstep = 1;
        break;
    case AV_PIX_FMT_RGBA:
        switch (showwaves->mode) {
        /* NOTE(review): the case labels assigning the draw_sample_*_rgba_*
         * callbacks (upstream lines 451-454) are elided in this dump; as
         * shown, every mode would hit the default — confirm upstream. */
        default:
            return AVERROR_BUG;
        }
        showwaves->pixstep = 4;
        break;
    }

    /* pick the amplitude->height mapping from scale x mode
     * (the *_h2 variants map magnitude only, for the centered line) */
    switch (showwaves->scale) {
    case SCALE_LIN:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_lin_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_lin_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_LOG:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_log_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_log_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_SQRT:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_sqrt_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_sqrt_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_CBRT:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_cbrt_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_cbrt_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    }

    /* 4 bytes (RGBA) per channel even for gray output, which uses [0] only */
    showwaves->fg = av_malloc_array(nb_channels, 4 * sizeof(*showwaves->fg));
    if (!showwaves->fg)
        return AVERROR(ENOMEM);

    colors = av_strdup(showwaves->colors);
    if (!colors)
        return AVERROR(ENOMEM);

    if (showwaves->draw_mode == DRAW_SCALE) {
        /* multiplication factor, pre-computed to avoid in-loop divisions */
        x = 255 / ((showwaves->split_channels ? 1 : nb_channels) * showwaves->n);
    } else {
        x = 255;
    }
    if (outlink->format == AV_PIX_FMT_RGBA) {
        uint8_t fg[4] = { 0xff, 0xff, 0xff, 0xff };

        /* one color per channel from the '|'-separated list; channels past
         * the end of the list keep the last parsed color */
        for (ch = 0; ch < nb_channels; ch++) {
            char *color;

            color = av_strtok(ch == 0 ? colors : NULL, " |", &saveptr);
            if (color)
                av_parse_color(fg, color, -1, ctx);
            showwaves->fg[4*ch + 0] = fg[0] * x / 255.;
            showwaves->fg[4*ch + 1] = fg[1] * x / 255.;
            showwaves->fg[4*ch + 2] = fg[2] * x / 255.;
            showwaves->fg[4*ch + 3] = fg[3] * x / 255.;
        }
    } else {
        for (ch = 0; ch < nb_channels; ch++)
            showwaves->fg[4 * ch + 0] = x;
    }
    av_free(colors);

    return 0;
}
541 
542 inline static int push_frame(AVFilterLink *outlink)
543 {
544  AVFilterContext *ctx = outlink->src;
545  AVFilterLink *inlink = ctx->inputs[0];
546  ShowWavesContext *showwaves = outlink->src->priv;
547  int nb_channels = inlink->channels;
548  int ret, i;
549 
550  ret = ff_filter_frame(outlink, showwaves->outpicref);
551  showwaves->outpicref = NULL;
552  showwaves->buf_idx = 0;
553  for (i = 0; i < nb_channels; i++)
554  showwaves->buf_idy[i] = 0;
555  return ret;
556 }
557 
558 static int push_single_pic(AVFilterLink *outlink)
559 {
560  AVFilterContext *ctx = outlink->src;
561  AVFilterLink *inlink = ctx->inputs[0];
562  ShowWavesContext *showwaves = ctx->priv;
563  int64_t n = 0, column_max_samples = showwaves->total_samples / outlink->w;
564  int64_t remaining_samples = showwaves->total_samples - (column_max_samples * outlink->w);
565  int64_t last_column_samples = column_max_samples + remaining_samples;
566  AVFrame *out = showwaves->outpicref;
567  struct frame_node *node;
568  const int nb_channels = inlink->channels;
569  const int ch_height = showwaves->split_channels ? outlink->h / nb_channels : outlink->h;
570  const int linesize = out->linesize[0];
571  const int pixstep = showwaves->pixstep;
572  int col = 0;
573  int64_t *sum = showwaves->sum;
574 
575  if (column_max_samples == 0) {
576  av_log(ctx, AV_LOG_ERROR, "Too few samples\n");
577  return AVERROR(EINVAL);
578  }
579 
580  av_log(ctx, AV_LOG_DEBUG, "Create frame averaging %"PRId64" samples per column\n", column_max_samples);
581 
582  memset(sum, 0, nb_channels);
583 
584  for (node = showwaves->audio_frames; node; node = node->next) {
585  int i;
586  const AVFrame *frame = node->frame;
587  const int16_t *p = (const int16_t *)frame->data[0];
588 
589  for (i = 0; i < frame->nb_samples; i++) {
590  int64_t max_samples = col == outlink->w - 1 ? last_column_samples: column_max_samples;
591  int ch;
592 
593  for (ch = 0; ch < nb_channels; ch++)
594  sum[ch] += abs(p[ch + i*nb_channels]) << 1;
595  n++;
596  if (n == max_samples) {
597  for (ch = 0; ch < nb_channels; ch++) {
598  int16_t sample = sum[ch] / max_samples;
599  uint8_t *buf = out->data[0] + col * pixstep;
600  int h;
601 
602  if (showwaves->split_channels)
603  buf += ch*ch_height*linesize;
604  av_assert0(col < outlink->w);
605  h = showwaves->get_h(sample, ch_height);
606  showwaves->draw_sample(buf, ch_height, linesize, &showwaves->buf_idy[ch], &showwaves->fg[ch * 4], h);
607  sum[ch] = 0;
608  }
609  col++;
610  n = 0;
611  }
612  }
613  }
614 
615  return push_frame(outlink);
616 }
617 
618 
/**
 * Output-link request_frame callback (showwavespic): pull from upstream and,
 * once EOF is reached with a pending picture, flush it.
 */
static int request_frame(AVFilterLink *outlink)
{
    ShowWavesContext *showwaves = outlink->src->priv;
    AVFilterLink *inlink = outlink->src->inputs[0];
    int ret;

    /* NOTE(review): the statement assigning `ret` (upstream line 625, a
     * request for a frame from `inlink`) is elided in this dump; as shown,
     * `ret` would be read uninitialized — confirm against upstream. */
    if (ret == AVERROR_EOF && showwaves->outpicref) {
        /* end of input: emit the (single or partial) pending picture */
        if (showwaves->single_pic)
            push_single_pic(outlink);
        else
            push_frame(outlink);
    }

    return ret;
}
635 
636 static int alloc_out_frame(ShowWavesContext *showwaves, const int16_t *p,
637  const AVFilterLink *inlink, AVFilterLink *outlink,
638  const AVFrame *in)
639 {
640  if (!showwaves->outpicref) {
641  int j;
642  AVFrame *out = showwaves->outpicref =
643  ff_get_video_buffer(outlink, outlink->w, outlink->h);
644  if (!out)
645  return AVERROR(ENOMEM);
646  out->width = outlink->w;
647  out->height = outlink->h;
648  out->pts = in->pts + av_rescale_q((p - (int16_t *)in->data[0]) / inlink->channels,
649  av_make_q(1, inlink->sample_rate),
650  outlink->time_base);
651  for (j = 0; j < outlink->h; j++)
652  memset(out->data[0] + j*out->linesize[0], 0, outlink->w * showwaves->pixstep);
653  }
654  return 0;
655 }
656 
{
    /* NOTE(review): the function signature (upstream line 657, the filter's
     * init callback taking AVFilterContext *ctx) is elided in this dump. */
    ShowWavesContext *showwaves = ctx->priv;

    /* shared init for both filters: the picture variant accumulates the
     * whole input and renders it as one centered-line image */
    if (!strcmp(ctx->filter->name, "showwavespic")) {
        showwaves->single_pic = 1;
        showwaves->mode = MODE_CENTERED_LINE;
    }

    return 0;
}
668 
669 #if CONFIG_SHOWWAVES_FILTER
670 
/**
 * showwaves: draw each input sample into the pending picture, one column per
 * n samples, pushing a frame whenever the canvas is full (or on the last
 * sample once the input has a status). Consumes `insamples`.
 */
static int showwaves_filter_frame(AVFilterLink *inlink, AVFrame *insamples)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    const int nb_samples = insamples->nb_samples;
    AVFrame *outpicref = showwaves->outpicref;
    int16_t *p = (int16_t *)insamples->data[0]; /* interleaved s16 samples */
    int nb_channels = inlink->channels;
    int i, j, ret = 0;
    const int pixstep = showwaves->pixstep;
    const int n = showwaves->n; /* samples folded into one output column */
    const int ch_height = showwaves->split_channels ? outlink->h / nb_channels : outlink->h;

    /* draw data in the buffer */
    for (i = 0; i < nb_samples; i++) {

        /* (re)allocate the canvas; no-op while one is already pending */
        ret = alloc_out_frame(showwaves, p, inlink, outlink, insamples);
        if (ret < 0)
            goto end;
        outpicref = showwaves->outpicref;

        for (j = 0; j < nb_channels; j++) {
            uint8_t *buf = outpicref->data[0] + showwaves->buf_idx * pixstep;
            const int linesize = outpicref->linesize[0];
            int h;

            /* in split mode each channel draws into its own band */
            if (showwaves->split_channels)
                buf += j*ch_height*linesize;
            h = showwaves->get_h(*p++, ch_height);
            showwaves->draw_sample(buf, ch_height, linesize,
                                   &showwaves->buf_idy[j], &showwaves->fg[j * 4], h);
        }

        /* advance one column every n samples */
        showwaves->sample_count_mod++;
        if (showwaves->sample_count_mod == n) {
            showwaves->sample_count_mod = 0;
            showwaves->buf_idx++;
        }
        /* flush when the canvas is full, or on the last sample if the
         * input already carries a status (EOF) */
        if (showwaves->buf_idx == showwaves->w ||
            (ff_outlink_get_status(inlink) && i == nb_samples - 1))
            if ((ret = push_frame(outlink)) < 0)
                break;
        outpicref = showwaves->outpicref; /* push_frame cleared it */
    }

end:
    av_frame_free(&insamples);
    return ret;
}
721 
/**
 * showwaves activate callback: consume exactly one output frame's worth of
 * samples (n per column x width columns) per invocation.
 */
static int activate(AVFilterContext *ctx)
{
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    AVFrame *in;
    const int nb_samples = showwaves->n * outlink->w; /* one full picture */
    int ret;

    /* NOTE(review): a status back-forwarding macro invocation (upstream
     * line 731) is elided in this dump — confirm against upstream. */

    ret = ff_inlink_consume_samples(inlink, nb_samples, nb_samples, &in);
    if (ret < 0)
        return ret;
    if (ret > 0)
        return showwaves_filter_frame(inlink, in);

    /* NOTE(review): the status/wanted forwarding macro invocations
     * (upstream lines 739-740) are elided in this dump — confirm. */

    return FFERROR_NOT_READY;
}
744 
/* showwaves input pads: a single audio input. */
static const AVFilterPad showwaves_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_AUDIO,
    },
    { NULL }
};
752 
/* showwaves output pads: a single video output configured by config_output. */
static const AVFilterPad showwaves_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
    { NULL }
};
761 
763  .name = "showwaves",
764  .description = NULL_IF_CONFIG_SMALL("Convert input audio to a video output."),
765  .init = init,
766  .uninit = uninit,
767  .query_formats = query_formats,
768  .priv_size = sizeof(ShowWavesContext),
769  .inputs = showwaves_inputs,
770  .activate = activate,
771  .outputs = showwaves_outputs,
772  .priv_class = &showwaves_class,
773 };
774 
775 #endif // CONFIG_SHOWWAVES_FILTER
776 
777 #if CONFIG_SHOWWAVESPIC_FILTER
778 
779 #define OFFSET(x) offsetof(ShowWavesContext, x)
780 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
781 
/* Options for the "showwavespic" filter — a subset of showwaves (no mode,
 * n or rate: the picture variant always averages into one centered image). */
static const AVOption showwavespic_options[] = {
    { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "s",    "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "split_channels", "draw channels separately", OFFSET(split_channels), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
    { "colors", "set channels colors", OFFSET(colors), AV_OPT_TYPE_STRING, {.str = "red|green|blue|yellow|orange|lime|pink|magenta|brown" }, 0, 0, FLAGS },
    { "scale", "set amplitude scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, SCALE_NB-1, FLAGS, .unit="scale" },
        { "lin",  "linear",      0, AV_OPT_TYPE_CONST, {.i64=SCALE_LIN},  .flags=FLAGS, .unit="scale"},
        { "log",  "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_LOG},  .flags=FLAGS, .unit="scale"},
        { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_SQRT}, .flags=FLAGS, .unit="scale"},
        { "cbrt", "cubic root",  0, AV_OPT_TYPE_CONST, {.i64=SCALE_CBRT}, .flags=FLAGS, .unit="scale"},
    { "draw", "set draw mode", OFFSET(draw_mode), AV_OPT_TYPE_INT, {.i64 = DRAW_SCALE}, 0, DRAW_NB-1, FLAGS, .unit="draw" },
        { "scale", "scale pixel values for each drawn sample", 0, AV_OPT_TYPE_CONST, {.i64=DRAW_SCALE}, .flags=FLAGS, .unit="draw"},
        { "full",  "draw every pixel for sample directly",     0, AV_OPT_TYPE_CONST, {.i64=DRAW_FULL},  .flags=FLAGS, .unit="draw"},
    { NULL }
};
797 
798 AVFILTER_DEFINE_CLASS(showwavespic);
799 
800 static int showwavespic_config_input(AVFilterLink *inlink)
801 {
802  AVFilterContext *ctx = inlink->dst;
803  ShowWavesContext *showwaves = ctx->priv;
804 
805  if (showwaves->single_pic) {
806  showwaves->sum = av_mallocz_array(inlink->channels, sizeof(*showwaves->sum));
807  if (!showwaves->sum)
808  return AVERROR(ENOMEM);
809  }
810 
811  return 0;
812 }
813 
814 static int showwavespic_filter_frame(AVFilterLink *inlink, AVFrame *insamples)
815 {
816  AVFilterContext *ctx = inlink->dst;
817  AVFilterLink *outlink = ctx->outputs[0];
818  ShowWavesContext *showwaves = ctx->priv;
819  int16_t *p = (int16_t *)insamples->data[0];
820  int ret = 0;
821 
822  if (showwaves->single_pic) {
823  struct frame_node *f;
824 
825  ret = alloc_out_frame(showwaves, p, inlink, outlink, insamples);
826  if (ret < 0)
827  goto end;
828 
829  /* queue the audio frame */
830  f = av_malloc(sizeof(*f));
831  if (!f) {
832  ret = AVERROR(ENOMEM);
833  goto end;
834  }
835  f->frame = insamples;
836  f->next = NULL;
837  if (!showwaves->last_frame) {
838  showwaves->audio_frames =
839  showwaves->last_frame = f;
840  } else {
841  showwaves->last_frame->next = f;
842  showwaves->last_frame = f;
843  }
844  showwaves->total_samples += insamples->nb_samples;
845 
846  return 0;
847  }
848 
849 end:
850  av_frame_free(&insamples);
851  return ret;
852 }
853 
/* showwavespic input pads: a single audio input; frames are queued by
 * showwavespic_filter_frame until EOF. */
static const AVFilterPad showwavespic_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_AUDIO,
        .config_props = showwavespic_config_input,
        .filter_frame = showwavespic_filter_frame,
    },
    { NULL }
};
863 
/* showwavespic output pads: a single video output; request_frame flushes
 * the accumulated picture at EOF. */
static const AVFilterPad showwavespic_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
        .request_frame = request_frame,
    },
    { NULL }
};
873 
875  .name = "showwavespic",
876  .description = NULL_IF_CONFIG_SMALL("Convert input audio to a video output single picture."),
877  .init = init,
878  .uninit = uninit,
879  .query_formats = query_formats,
880  .priv_size = sizeof(ShowWavesContext),
881  .inputs = showwavespic_inputs,
882  .outputs = showwavespic_outputs,
883  .priv_class = &showwavespic_class,
884 };
885 
886 #endif // CONFIG_SHOWWAVESPIC_FILTER
formats
formats
Definition: signature.h:48
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
SCALE_SQRT
@ SCALE_SQRT
Definition: avf_showwaves.c:49
DRAW_SCALE
@ DRAW_SCALE
Definition: avf_showwaves.c:55
AVFilterChannelLayouts
A list of supported channel layouts.
Definition: formats.h:85
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:283
get_cbrt_h
static int get_cbrt_h(int16_t sample, int height)
Definition: avf_showwaves.c:206
out
FILE * out
Definition: movenc.c:54
FFSWAP
#define FFSWAP(type, a, b)
Definition: common.h:99
color
Definition: vf_paletteuse.c:588
n
int n
Definition: avisynth_c.h:760
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1080
sample_fmts
static enum AVSampleFormat sample_fmts[]
Definition: adpcmenc.c:686
ff_channel_layouts_ref
int ff_channel_layouts_ref(AVFilterChannelLayouts *f, AVFilterChannelLayouts **ref)
Add *ref as a new reference to f.
Definition: formats.c:435
layouts
enum MovChannelLayoutTag * layouts
Definition: mov_chan.c:434
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:55
av_parse_color
int av_parse_color(uint8_t *rgba_color, const char *color_string, int slen, void *log_ctx)
Put the RGBA values that correspond to color_string in rgba_color.
Definition: parseutils.c:354
FFERROR_NOT_READY
return FFERROR_NOT_READY
Definition: filter_design.txt:204
AV_OPT_TYPE_VIDEO_RATE
@ AV_OPT_TYPE_VIDEO_RATE
offset must point to AVRational
Definition: opt.h:236
av_div_q
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
frame_node::frame
AVFrame * frame
Definition: avf_showwaves.c:61
ShowWavesContext::get_h
int(* get_h)(int16_t sample, int height)
Definition: avf_showwaves.c:82
ch
uint8_t pi<< 24) CONV_FUNC(AV_SAMPLE_FMT_S64, int64_t, AV_SAMPLE_FMT_U8,(uint64_t)((*(const uint8_t *) pi - 0x80U))<< 56) CONV_FUNC(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8,(*(const uint8_t *) pi - 0x80) *(1.0f/(1<< 7))) CONV_FUNC(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8,(*(const uint8_t *) pi - 0x80) *(1.0/(1<< 7))) CONV_FUNC(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16,(*(const int16_t *) pi >>8)+0x80) CONV_FUNC(AV_SAMPLE_FMT_S64, int64_t, AV_SAMPLE_FMT_S16,(uint64_t)(*(const int16_t *) pi)<< 48) CONV_FUNC(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, *(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, *(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32,(*(const int32_t *) pi >>24)+0x80) CONV_FUNC(AV_SAMPLE_FMT_S64, int64_t, AV_SAMPLE_FMT_S32,(uint64_t)(*(const int32_t *) pi)<< 32) CONV_FUNC(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, *(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, *(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S64,(*(const int64_t *) pi >>56)+0x80) CONV_FUNC(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S64, *(const int64_t *) pi *(1.0f/(INT64_C(1)<< 63))) CONV_FUNC(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S64, *(const int64_t *) pi *(1.0/(INT64_C(1)<< 63))) CONV_FUNC(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC(AV_SAMPLE_FMT_S64, int64_t, AV_SAMPLE_FMT_FLT, llrintf(*(const float *) pi *(INT64_C(1)<< 63))) CONV_FUNC(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, 
av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31)))) CONV_FUNC(AV_SAMPLE_FMT_S64, int64_t, AV_SAMPLE_FMT_DBL, llrint(*(const double *) pi *(INT64_C(1)<< 63))) #define FMT_PAIR_FUNC(out, in) static conv_func_type *const fmt_pair_to_conv_functions[AV_SAMPLE_FMT_NB *AV_SAMPLE_FMT_NB]={ FMT_PAIR_FUNC(AV_SAMPLE_FMT_U8, AV_SAMPLE_FMT_U8), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_U8), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S32, AV_SAMPLE_FMT_U8), FMT_PAIR_FUNC(AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_U8), FMT_PAIR_FUNC(AV_SAMPLE_FMT_DBL, AV_SAMPLE_FMT_U8), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S64, AV_SAMPLE_FMT_U8), FMT_PAIR_FUNC(AV_SAMPLE_FMT_U8, AV_SAMPLE_FMT_S16), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_S16), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S32, AV_SAMPLE_FMT_S16), FMT_PAIR_FUNC(AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_S16), FMT_PAIR_FUNC(AV_SAMPLE_FMT_DBL, AV_SAMPLE_FMT_S16), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S64, AV_SAMPLE_FMT_S16), FMT_PAIR_FUNC(AV_SAMPLE_FMT_U8, AV_SAMPLE_FMT_S32), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_S32), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S32, AV_SAMPLE_FMT_S32), FMT_PAIR_FUNC(AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_S32), FMT_PAIR_FUNC(AV_SAMPLE_FMT_DBL, AV_SAMPLE_FMT_S32), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S64, AV_SAMPLE_FMT_S32), FMT_PAIR_FUNC(AV_SAMPLE_FMT_U8, AV_SAMPLE_FMT_FLT), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_FLT), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S32, AV_SAMPLE_FMT_FLT), FMT_PAIR_FUNC(AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_FLT), FMT_PAIR_FUNC(AV_SAMPLE_FMT_DBL, AV_SAMPLE_FMT_FLT), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S64, AV_SAMPLE_FMT_FLT), FMT_PAIR_FUNC(AV_SAMPLE_FMT_U8, AV_SAMPLE_FMT_DBL), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_DBL), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S32, AV_SAMPLE_FMT_DBL), FMT_PAIR_FUNC(AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_DBL), FMT_PAIR_FUNC(AV_SAMPLE_FMT_DBL, AV_SAMPLE_FMT_DBL), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S64, AV_SAMPLE_FMT_DBL), 
FMT_PAIR_FUNC(AV_SAMPLE_FMT_U8, AV_SAMPLE_FMT_S64), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_S64), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S32, AV_SAMPLE_FMT_S64), FMT_PAIR_FUNC(AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_S64), FMT_PAIR_FUNC(AV_SAMPLE_FMT_DBL, AV_SAMPLE_FMT_S64), FMT_PAIR_FUNC(AV_SAMPLE_FMT_S64, AV_SAMPLE_FMT_S64), };static void cpy1(uint8_t **dst, const uint8_t **src, int len){ memcpy(*dst, *src, len);} static void cpy2(uint8_t **dst, const uint8_t **src, int len){ memcpy(*dst, *src, 2 *len);} static void cpy4(uint8_t **dst, const uint8_t **src, int len){ memcpy(*dst, *src, 4 *len);} static void cpy8(uint8_t **dst, const uint8_t **src, int len){ memcpy(*dst, *src, 8 *len);} AudioConvert *swri_audio_convert_alloc(enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, const int *ch_map, int flags) { AudioConvert *ctx;conv_func_type *f=fmt_pair_to_conv_functions[av_get_packed_sample_fmt(out_fmt)+AV_SAMPLE_FMT_NB *av_get_packed_sample_fmt(in_fmt)];if(!f) return NULL;ctx=av_mallocz(sizeof(*ctx));if(!ctx) return NULL;if(channels==1){ in_fmt=av_get_planar_sample_fmt(in_fmt);out_fmt=av_get_planar_sample_fmt(out_fmt);} ctx->channels=channels;ctx->conv_f=f;ctx->ch_map=ch_map;if(in_fmt==AV_SAMPLE_FMT_U8||in_fmt==AV_SAMPLE_FMT_U8P) memset(ctx->silence, 0x80, sizeof(ctx->silence));if(out_fmt==in_fmt &&!ch_map) { switch(av_get_bytes_per_sample(in_fmt)){ case 1:ctx->simd_f=cpy1;break;case 2:ctx->simd_f=cpy2;break;case 4:ctx->simd_f=cpy4;break;case 8:ctx->simd_f=cpy8;break;} } if(HAVE_X86ASM &&1) swri_audio_convert_init_x86(ctx, out_fmt, in_fmt, channels);if(ARCH_ARM) swri_audio_convert_init_arm(ctx, out_fmt, in_fmt, channels);if(ARCH_AARCH64) swri_audio_convert_init_aarch64(ctx, out_fmt, in_fmt, channels);return ctx;} void swri_audio_convert_free(AudioConvert **ctx) { av_freep(ctx);} int swri_audio_convert(AudioConvert *ctx, AudioData *out, AudioData *in, int len) { int ch;int off=0;const int os=(out->planar ? 
1 :out->ch_count) *out->bps;unsigned misaligned=0;av_assert0(ctx->channels==out->ch_count);if(ctx->in_simd_align_mask) { int planes=in->planar ? in->ch_count :1;unsigned m=0;for(ch=0;ch< planes;ch++) m|=(intptr_t) in->ch[ch];misaligned|=m &ctx->in_simd_align_mask;} if(ctx->out_simd_align_mask) { int planes=out->planar ? out->ch_count :1;unsigned m=0;for(ch=0;ch< planes;ch++) m|=(intptr_t) out->ch[ch];misaligned|=m &ctx->out_simd_align_mask;} if(ctx->simd_f &&!ctx->ch_map &&!misaligned){ off=len &~15;av_assert1(off >=0);av_assert1(off<=len);av_assert2(ctx->channels==SWR_CH_MAX||!in->ch[ctx->channels]);if(off >0){ if(out->planar==in->planar){ int planes=out->planar ? out->ch_count :1;for(ch=0;ch< planes;ch++){ ctx->simd_f(out-> ch ch
Definition: audioconvert.c:56
frame_node::next
struct frame_node * next
Definition: avf_showwaves.c:62
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
draw_sample_point_rgba_scale
static void draw_sample_point_rgba_scale(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:216
ShowWavesContext::buf_idy
int16_t * buf_idy
Definition: avf_showwaves.c:71
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:202
end
static av_cold int end(AVCodecContext *avctx)
Definition: avrndec.c:90
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:295
tmp
static uint8_t tmp[11]
Definition: aes_ctr.c:26
w
uint8_t w
Definition: llviddspenc.c:38
AVOption
AVOption.
Definition: opt.h:246
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:407
av_mallocz_array
void * av_mallocz_array(size_t nmemb, size_t size)
Definition: mem.c:191
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:192
get_cbrt_h2
static int get_cbrt_h2(int16_t sample, int height)
Definition: avf_showwaves.c:211
ShowWavesContext::buf_idx
int buf_idx
Definition: avf_showwaves.c:70
ShowWavesContext::w
int w
Definition: avf_showwaves.c:67
showwaves_options
static const AVOption showwaves_options[]
Definition: avf_showwaves.c:97
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(showwaves)
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:148
frame_node
Definition: avf_showwaves.c:60
video.h
get_sqrt_h2
static int get_sqrt_h2(int16_t sample, int height)
Definition: avf_showwaves.c:201
FF_FILTER_FORWARD_STATUS_BACK
#define FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink)
Forward the status on an output link to an input link.
Definition: filters.h:199
DRAW_FULL
@ DRAW_FULL
Definition: avf_showwaves.c:56
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:309
draw_sample_line_rgba_scale
static void draw_sample_line_rgba_scale(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:240
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:31
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
draw_sample_cline_rgba_full
static void draw_sample_cline_rgba_full(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:340
get_log_h2
static int get_log_h2(int16_t sample, int height)
Definition: avf_showwaves.c:191
AVFilterContext::priv
void * priv
private data for use by the filter
Definition: avfilter.h:353
MODE_NB
@ MODE_NB
Definition: avf_showwaves.c:43
get_sqrt_h
static int get_sqrt_h(int16_t sample, int height)
Definition: avf_showwaves.c:196
start
void INT64 start
Definition: avisynth_c.h:767
get_lin_h2
static int get_lin_h2(int16_t sample, int height)
Definition: avf_showwaves.c:181
FFSIGN
#define FFSIGN(a)
Definition: common.h:73
SCALE_CBRT
@ SCALE_CBRT
Definition: avf_showwaves.c:50
get_lin_h
static int get_lin_h(int16_t sample, int height)
Definition: avf_showwaves.c:176
ShowWavesContext::sample_count_mod
int sample_count_mod
Definition: avf_showwaves.c:75
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
cbrt
#define cbrt
Definition: tablegen.h:35
draw_sample_point_gray
static void draw_sample_point_gray(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:355
draw_sample_p2p_gray
static void draw_sample_p2p_gray(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:376
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
buf
void * buf
Definition: avisynth_c.h:766
av_cold
#define av_cold
Definition: attributes.h:84
DRAW_NB
@ DRAW_NB
Definition: avf_showwaves.c:57
FLAGS
#define FLAGS
Definition: avf_showwaves.c:95
ShowWavesContext::sum
int64_t * sum
Definition: avf_showwaves.c:91
ShowWavesContext::last_frame
struct frame_node * last_frame
Definition: avf_showwaves.c:89
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:202
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:440
av_q2d
static double av_q2d(AVRational a)
Convert an AVRational to a double.
Definition: rational.h:104
ShowWavesContext::audio_frames
struct frame_node * audio_frames
Definition: avf_showwaves.c:88
av_strtok
char * av_strtok(char *s, const char *delim, char **saveptr)
Split the string into several tokens which can be accessed by successive calls to av_strtok().
Definition: avstring.c:184
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
filters.h
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:275
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:197
ctx
AVFormatContext * ctx
Definition: movenc.c:48
init
static av_cold int init(AVFilterContext *ctx)
Definition: avf_showwaves.c:657
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
ShowWavesContext::rate
AVRational rate
Definition: avf_showwaves.c:68
f
#define f(width, name)
Definition: cbs_vp9.c:255
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
FFABS
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
Definition: common.h:72
if
if(ret)
Definition: filter_design.txt:179
config_output
static int config_output(AVFilterLink *outlink)
Definition: avf_showwaves.c:406
push_frame
static int push_frame(AVFilterLink *outlink)
Definition: avf_showwaves.c:542
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
ShowWavesContext::scale
int scale
ShowWavesScale.
Definition: avf_showwaves.c:77
ff_inlink_consume_samples
int ff_inlink_consume_samples(AVFilterLink *link, unsigned min, unsigned max, AVFrame **rframe)
Take samples from the link's FIFO and update the link's stats.
Definition: avfilter.c:1500
NULL
#define NULL
Definition: coverity.c:32
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
get_log_h
static int get_log_h(int16_t sample, int height)
Definition: avf_showwaves.c:186
activate
filter_frame For filters that do not use the activate() callback
ShowWavesContext::single_pic
int single_pic
Definition: avf_showwaves.c:87
AV_OPT_TYPE_IMAGE_SIZE
@ AV_OPT_TYPE_IMAGE_SIZE
offset must point to two consecutive integers
Definition: opt.h:233
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:346
ShowWavesContext::mode
int mode
ShowWavesMode.
Definition: avf_showwaves.c:76
parseutils.h
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: avf_showwaves.c:123
ShowWavesContext::total_samples
int64_t total_samples
Definition: avf_showwaves.c:90
request_frame
static int request_frame(AVFilterLink *outlink)
Definition: avf_showwaves.c:619
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: avf_showwaves.c:145
abs
#define abs(x)
Definition: cuda_runtime.h:35
inputs
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
Definition: filter_design.txt:243
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
MODE_POINT
@ MODE_POINT
Definition: avf_showwaves.c:39
ShowWavesContext::draw_mode
int draw_mode
ShowWavesDrawMode.
Definition: avf_showwaves.c:78
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
draw_sample_cline_gray
static void draw_sample_cline_gray(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:395
ff_avf_showwavespic
AVFilter ff_avf_showwavespic
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:188
MODE_CENTERED_LINE
@ MODE_CENTERED_LINE
Definition: avf_showwaves.c:42
draw_sample_p2p_rgba_full
static void draw_sample_p2p_rgba_full(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:300
FFMAX
#define FFMAX(a, b)
Definition: common.h:94
AV_SAMPLE_FMT_NONE
@ AV_SAMPLE_FMT_NONE
Definition: samplefmt.h:59
sample
#define sample
Definition: flacdsp_template.c:44
color
static const uint32_t color[16+AV_CLASS_CATEGORY_NB]
Definition: log.c:92
av_make_q
static AVRational av_make_q(int num, int den)
Create an AVRational.
Definition: rational.h:71
ShowWavesContext::n
int n
Definition: avf_showwaves.c:73
SCALE_LIN
@ SCALE_LIN
Definition: avf_showwaves.c:47
height
#define height
FF_FILTER_FORWARD_WANTED
FF_FILTER_FORWARD_WANTED(outlink, inlink)
ff_all_channel_layouts
AVFilterChannelLayouts * ff_all_channel_layouts(void)
Construct an empty AVFilterChannelLayouts/AVFilterFormats struct – representing any channel layout (w...
Definition: formats.c:401
MODE_P2P
@ MODE_P2P
Definition: avf_showwaves.c:41
ShowWavesContext::outpicref
AVFrame * outpicref
Definition: avf_showwaves.c:72
internal.h
ShowWavesContext::h
int h
Definition: avf_showwaves.c:67
ShowWavesScale
ShowWavesScale
Definition: avf_showwaves.c:46
in
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(const int16_t *) pi >> 8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(const int32_t *) pi >> 24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31)))) #define SET_CONV_FUNC_GROUP(ofmt, ifmt) static void set_generic_function(AudioConvert *ac) { } void ff_audio_convert_free(AudioConvert **ac) { if(! 
*ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);} AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, int sample_rate, int apply_map) { AudioConvert *ac;int in_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) return NULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method !=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt) > 2) { ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc) { av_free(ac);return NULL;} return ac;} in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar) { ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar ? ac->channels :1;} else if(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;else ac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);return ac;} int ff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in) { int use_generic=1;int len=in->nb_samples;int p;if(ac->dc) { av_log(ac->avr, AV_LOG_TRACE, "%d samples - audio_convert: %s to %s (dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));return ff_convert_dither(ac-> in
Definition: audio_convert.c:326
push_single_pic
static int push_single_pic(AVFilterLink *outlink)
Definition: avf_showwaves.c:558
AVFrame::nb_samples
int nb_samples
number of audio samples (per channel) described by this frame
Definition: frame.h:361
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:259
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:32
AVSampleFormat
AVSampleFormat
Audio sample formats.
Definition: samplefmt.h:58
draw_sample_line_gray
static void draw_sample_line_gray(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:363
uint8_t
uint8_t
Definition: audio_convert.c:194
AV_SAMPLE_FMT_S16
@ AV_SAMPLE_FMT_S16
signed 16 bits
Definition: samplefmt.h:61
draw_sample_p2p_rgba_scale
static void draw_sample_p2p_rgba_scale(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:274
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:60
av_rescale
int64_t av_rescale(int64_t a, int64_t b, int64_t c)
Rescale a 64-bit integer with rounding to nearest.
Definition: mathematics.c:129
AVFilter
Filter definition.
Definition: avfilter.h:144
ret
ret
Definition: filter_design.txt:187
ShowWavesContext::draw_sample
void(* draw_sample)(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:83
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
ShowWavesContext::fg
uint8_t * fg
Definition: avf_showwaves.c:80
SCALE_LOG
@ SCALE_LOG
Definition: avf_showwaves.c:48
ff_all_samplerates
AVFilterFormats * ff_all_samplerates(void)
Definition: formats.c:395
channel_layout.h
mode
mode
Definition: ebur128.h:83
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:223
avfilter.h
draw_sample_cline_rgba_scale
static void draw_sample_cline_rgba_scale(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:326
ShowWavesContext::split_channels
int split_channels
Definition: avf_showwaves.c:79
ShowWavesContext::colors
char * colors
Definition: avf_showwaves.c:69
MODE_LINE
@ MODE_LINE
Definition: avf_showwaves.c:40
alloc_out_frame
static int alloc_out_frame(ShowWavesContext *showwaves, const int16_t *p, const AVFilterLink *inlink, AVFilterLink *outlink, const AVFrame *in)
Definition: avf_showwaves.c:636
ff_outlink_get_status
int ff_outlink_get_status(AVFilterLink *link)
Get the status on an output link.
Definition: avfilter.c:1630
AVFilterContext
An instance of a filter.
Definition: avfilter.h:338
ShowWavesMode
ShowWavesMode
Definition: avf_showwaves.c:38
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:251
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
audio.h
OFFSET
#define OFFSET(x)
Definition: avf_showwaves.c:94
av_free
#define av_free(p)
Definition: tableprint_vlc.h:34
FF_FILTER_FORWARD_STATUS
FF_FILTER_FORWARD_STATUS(inlink, outlink)
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:240
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:50
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:326
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
h
h
Definition: vp9dsp_template.c:2038
ShowWavesDrawMode
ShowWavesDrawMode
Definition: avf_showwaves.c:54
avstring.h
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:227
ff_avf_showwaves
AVFilter ff_avf_showwaves
int
int
Definition: ffmpeg_filter.c:191
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:232
ShowWavesContext::pixstep
int pixstep
Definition: avf_showwaves.c:74
ShowWavesContext
Definition: avf_showwaves.c:65
SCALE_NB
@ SCALE_NB
Definition: avf_showwaves.c:51
draw_sample_line_rgba_full
static void draw_sample_line_rgba_full(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:257
draw_sample_point_rgba_full
static void draw_sample_point_rgba_full(uint8_t *buf, int height, int linesize, int16_t *prev_y, const uint8_t color[4], int h)
Definition: avf_showwaves.c:228
nb_channels
int nb_channels
Definition: channel_layout.c:76