FFmpeg
avf_showwaves.c
/*
 * Copyright (c) 2012 Stefano Sabatini
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * audio to video multimedia filter
 */

#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/channel_layout.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include "avfilter.h"
#include "formats.h"
#include "audio.h"
#include "video.h"
#include "internal.h"

enum ShowWavesMode {
    MODE_POINT,
    MODE_LINE,
    MODE_P2P,
    MODE_CENTERED_LINE,
    MODE_NB,
};

enum ShowWavesScale {
    SCALE_LIN,
    SCALE_LOG,
    SCALE_SQRT,
    SCALE_CBRT,
    SCALE_NB,
};

struct frame_node {
    AVFrame *frame;
    struct frame_node *next;
};

typedef struct {
    const AVClass *class;
    int w, h;
    AVRational rate;
    char *colors;
    int buf_idx;
    int16_t *buf_idy;           /* y coordinate of previous sample for each channel */
    AVFrame *outpicref;
    int n;
    int pixstep;
    int sample_count_mod;
    int mode;                   ///< ShowWavesMode
    int scale;                  ///< ShowWavesScale
    int split_channels;
    uint8_t *fg;

    int (*get_h)(int16_t sample, int height);
    void (*draw_sample)(uint8_t *buf, int height, int linesize,
                        int16_t *prev_y, const uint8_t color[4], int h);

    /* single picture */
    int single_pic;
    struct frame_node *audio_frames;
    struct frame_node *last_frame;
    int64_t total_samples;
    int64_t *sum;               /* abs sum of the samples per channel */
} ShowWavesContext;

#define OFFSET(x) offsetof(ShowWavesContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption showwaves_options[] = {
    { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "s",    "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "mode", "select display mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=MODE_POINT}, 0, MODE_NB-1, FLAGS, "mode"},
        { "point", "draw a point for each sample",         0, AV_OPT_TYPE_CONST, {.i64=MODE_POINT},         .flags=FLAGS, .unit="mode"},
        { "line",  "draw a line for each sample",          0, AV_OPT_TYPE_CONST, {.i64=MODE_LINE},          .flags=FLAGS, .unit="mode"},
        { "p2p",   "draw a line between samples",          0, AV_OPT_TYPE_CONST, {.i64=MODE_P2P},           .flags=FLAGS, .unit="mode"},
        { "cline", "draw a centered line for each sample", 0, AV_OPT_TYPE_CONST, {.i64=MODE_CENTERED_LINE}, .flags=FLAGS, .unit="mode"},
    { "n",    "set how many samples to show in the same point", OFFSET(n), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, FLAGS },
    { "rate", "set video rate", OFFSET(rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },
    { "r",    "set video rate", OFFSET(rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },
    { "split_channels", "draw channels separately", OFFSET(split_channels), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
    { "colors", "set channels colors", OFFSET(colors), AV_OPT_TYPE_STRING, {.str = "red|green|blue|yellow|orange|lime|pink|magenta|brown" }, 0, 0, FLAGS },
    { "scale", "set amplitude scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, SCALE_NB-1, FLAGS, .unit="scale" },
        { "lin",  "linear",      0, AV_OPT_TYPE_CONST, {.i64=SCALE_LIN},  .flags=FLAGS, .unit="scale"},
        { "log",  "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_LOG},  .flags=FLAGS, .unit="scale"},
        { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_SQRT}, .flags=FLAGS, .unit="scale"},
        { "cbrt", "cubic root",  0, AV_OPT_TYPE_CONST, {.i64=SCALE_CBRT}, .flags=FLAGS, .unit="scale"},
    { NULL }
};
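
/* The options above map directly to the filtergraph option syntax. An
 * illustrative invocation from the ffmpeg CLI (a sketch only: the input and
 * output file names are placeholders, and any of the declared options may be
 * combined):
 *
 *   ffmpeg -i input.wav -filter_complex "showwaves=s=640x240:mode=line:scale=sqrt" waves.mp4
 */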

AVFILTER_DEFINE_CLASS(showwaves);

static av_cold void uninit(AVFilterContext *ctx)
{
    ShowWavesContext *showwaves = ctx->priv;

    av_frame_free(&showwaves->outpicref);
    av_freep(&showwaves->buf_idy);
    av_freep(&showwaves->fg);

    if (showwaves->single_pic) {
        struct frame_node *node = showwaves->audio_frames;
        while (node) {
            struct frame_node *tmp = node;

            node = node->next;
            av_frame_free(&tmp->frame);
            av_freep(&tmp);
        }
        av_freep(&showwaves->sum);
        showwaves->last_frame = NULL;
    }
}

static int query_formats(AVFilterContext *ctx)
{
    AVFilterFormats *formats = NULL;
    AVFilterChannelLayouts *layouts = NULL;
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    static const enum AVSampleFormat sample_fmts[] = { AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_NONE };
    static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_GRAY8, AV_PIX_FMT_RGBA, AV_PIX_FMT_NONE };
    int ret;

    /* set input audio formats */
    formats = ff_make_format_list(sample_fmts);
    if ((ret = ff_formats_ref(formats, &inlink->out_formats)) < 0)
        return ret;

    layouts = ff_all_channel_layouts();
    if ((ret = ff_channel_layouts_ref(layouts, &inlink->out_channel_layouts)) < 0)
        return ret;

    formats = ff_all_samplerates();
    if ((ret = ff_formats_ref(formats, &inlink->out_samplerates)) < 0)
        return ret;

    /* set output video format */
    formats = ff_make_format_list(pix_fmts);
    if ((ret = ff_formats_ref(formats, &outlink->in_formats)) < 0)
        return ret;

    return 0;
}

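/* Per-scale helpers: map a signed 16-bit sample to a y coordinate (or, in the
 * "*_h2" variants used by the centered-line mode, to a line height) within a
 * channel band of the given height. One of them is selected in config_output()
 * according to the "scale" and "mode" options. */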
static int get_lin_h(int16_t sample, int height)
{
    return height/2 - av_rescale(sample, height/2, INT16_MAX);
}

static int get_lin_h2(int16_t sample, int height)
{
    return av_rescale(FFABS(sample), height, INT16_MAX);
}

static int get_log_h(int16_t sample, int height)
{
    return height/2 - FFSIGN(sample) * (log10(1 + FFABS(sample)) * (height/2) / log10(1 + INT16_MAX));
}

static int get_log_h2(int16_t sample, int height)
{
    return log10(1 + FFABS(sample)) * height / log10(1 + INT16_MAX);
}

static int get_sqrt_h(int16_t sample, int height)
{
    return height/2 - FFSIGN(sample) * (sqrt(FFABS(sample)) * (height/2) / sqrt(INT16_MAX));
}

static int get_sqrt_h2(int16_t sample, int height)
{
    return sqrt(FFABS(sample)) * height / sqrt(INT16_MAX);
}

static int get_cbrt_h(int16_t sample, int height)
{
    return height/2 - FFSIGN(sample) * (cbrt(FFABS(sample)) * (height/2) / cbrt(INT16_MAX));
}

static int get_cbrt_h2(int16_t sample, int height)
{
    return cbrt(FFABS(sample)) * height / cbrt(INT16_MAX);
}

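/* Drawing callbacks for the RGBA output format: each one accumulates the
 * pre-scaled channel color into the output buffer, drawing respectively a
 * single point, a vertical line from the center, a line connecting consecutive
 * samples, or a line centered on the channel band. */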
static void draw_sample_point_rgba(uint8_t *buf, int height, int linesize,
                                   int16_t *prev_y,
                                   const uint8_t color[4], int h)
{
    if (h >= 0 && h < height) {
        buf[h * linesize + 0] += color[0];
        buf[h * linesize + 1] += color[1];
        buf[h * linesize + 2] += color[2];
        buf[h * linesize + 3] += color[3];
    }
}

static void draw_sample_line_rgba(uint8_t *buf, int height, int linesize,
                                  int16_t *prev_y,
                                  const uint8_t color[4], int h)
{
    int k;
    int start = height/2;
    int end   = av_clip(h, 0, height-1);
    if (start > end)
        FFSWAP(int16_t, start, end);
    for (k = start; k < end; k++) {
        buf[k * linesize + 0] += color[0];
        buf[k * linesize + 1] += color[1];
        buf[k * linesize + 2] += color[2];
        buf[k * linesize + 3] += color[3];
    }
}

static void draw_sample_p2p_rgba(uint8_t *buf, int height, int linesize,
                                 int16_t *prev_y,
                                 const uint8_t color[4], int h)
{
    int k;
    if (h >= 0 && h < height) {
        buf[h * linesize + 0] += color[0];
        buf[h * linesize + 1] += color[1];
        buf[h * linesize + 2] += color[2];
        buf[h * linesize + 3] += color[3];
        if (*prev_y && h != *prev_y) {
            int start = *prev_y;
            int end = av_clip(h, 0, height-1);
            if (start > end)
                FFSWAP(int16_t, start, end);
            for (k = start + 1; k < end; k++) {
                buf[k * linesize + 0] += color[0];
                buf[k * linesize + 1] += color[1];
                buf[k * linesize + 2] += color[2];
                buf[k * linesize + 3] += color[3];
            }
        }
    }
    *prev_y = h;
}

static void draw_sample_cline_rgba(uint8_t *buf, int height, int linesize,
                                   int16_t *prev_y,
                                   const uint8_t color[4], int h)
{
    int k;
    const int start = (height - h) / 2;
    const int end   = start + h;
    for (k = start; k < end; k++) {
        buf[k * linesize + 0] += color[0];
        buf[k * linesize + 1] += color[1];
        buf[k * linesize + 2] += color[2];
        buf[k * linesize + 3] += color[3];
    }
}

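/* The same four drawing modes for the single-plane AV_PIX_FMT_GRAY8 output. */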
static void draw_sample_point_gray(uint8_t *buf, int height, int linesize,
                                   int16_t *prev_y,
                                   const uint8_t color[4], int h)
{
    if (h >= 0 && h < height)
        buf[h * linesize] += color[0];
}

static void draw_sample_line_gray(uint8_t *buf, int height, int linesize,
                                  int16_t *prev_y,
                                  const uint8_t color[4], int h)
{
    int k;
    int start = height/2;
    int end   = av_clip(h, 0, height-1);
    if (start > end)
        FFSWAP(int16_t, start, end);
    for (k = start; k < end; k++)
        buf[k * linesize] += color[0];
}

static void draw_sample_p2p_gray(uint8_t *buf, int height, int linesize,
                                 int16_t *prev_y,
                                 const uint8_t color[4], int h)
{
    int k;
    if (h >= 0 && h < height) {
        buf[h * linesize] += color[0];
        if (*prev_y && h != *prev_y) {
            int start = *prev_y;
            int end = av_clip(h, 0, height-1);
            if (start > end)
                FFSWAP(int16_t, start, end);
            for (k = start + 1; k < end; k++)
                buf[k * linesize] += color[0];
        }
    }
    *prev_y = h;
}

static void draw_sample_cline_gray(uint8_t *buf, int height, int linesize,
                                   int16_t *prev_y,
                                   const uint8_t color[4], int h)
{
    int k;
    const int start = (height - h) / 2;
    const int end   = start + h;
    for (k = start; k < end; k++)
        buf[k * linesize] += color[0];
}

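/* Configure the video output: compute the number of input samples drawn into
 * each output column, the output frame rate, the drawing callbacks matching the
 * pixel format/mode/scale options, and the per-channel foreground colors,
 * scaled so that overlapping samples accumulate up to full intensity. */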
static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    int nb_channels = inlink->channels;
    char *colors, *saveptr = NULL;
    uint8_t x;
    int ch;

    if (showwaves->single_pic)
        showwaves->n = 1;

    if (!showwaves->n)
        showwaves->n = FFMAX(1, ((double)inlink->sample_rate / (showwaves->w * av_q2d(showwaves->rate))) + 0.5);

    showwaves->buf_idx = 0;
    if (!(showwaves->buf_idy = av_mallocz_array(nb_channels, sizeof(*showwaves->buf_idy)))) {
        av_log(ctx, AV_LOG_ERROR, "Could not allocate showwaves buffer\n");
        return AVERROR(ENOMEM);
    }
    outlink->w = showwaves->w;
    outlink->h = showwaves->h;
    outlink->sample_aspect_ratio = (AVRational){1,1};

    outlink->frame_rate = av_div_q((AVRational){inlink->sample_rate,showwaves->n},
                                   (AVRational){showwaves->w,1});

    av_log(ctx, AV_LOG_VERBOSE, "s:%dx%d r:%f n:%d\n",
           showwaves->w, showwaves->h, av_q2d(outlink->frame_rate), showwaves->n);

    switch (outlink->format) {
    case AV_PIX_FMT_GRAY8:
        switch (showwaves->mode) {
        case MODE_POINT:         showwaves->draw_sample = draw_sample_point_gray; break;
        case MODE_LINE:          showwaves->draw_sample = draw_sample_line_gray;  break;
        case MODE_P2P:           showwaves->draw_sample = draw_sample_p2p_gray;   break;
        case MODE_CENTERED_LINE: showwaves->draw_sample = draw_sample_cline_gray; break;
        default:
            return AVERROR_BUG;
        }
        showwaves->pixstep = 1;
        break;
    case AV_PIX_FMT_RGBA:
        switch (showwaves->mode) {
        case MODE_POINT:         showwaves->draw_sample = draw_sample_point_rgba; break;
        case MODE_LINE:          showwaves->draw_sample = draw_sample_line_rgba;  break;
        case MODE_P2P:           showwaves->draw_sample = draw_sample_p2p_rgba;   break;
        case MODE_CENTERED_LINE: showwaves->draw_sample = draw_sample_cline_rgba; break;
        default:
            return AVERROR_BUG;
        }
        showwaves->pixstep = 4;
        break;
    }

    switch (showwaves->scale) {
    case SCALE_LIN:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_lin_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_lin_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_LOG:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_log_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_log_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_SQRT:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_sqrt_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_sqrt_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_CBRT:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_cbrt_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_cbrt_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    }

    showwaves->fg = av_malloc_array(nb_channels, 4 * sizeof(*showwaves->fg));
    if (!showwaves->fg)
        return AVERROR(ENOMEM);

    colors = av_strdup(showwaves->colors);
    if (!colors)
        return AVERROR(ENOMEM);

    /* multiplication factor, pre-computed to avoid in-loop divisions */
    x = 255 / ((showwaves->split_channels ? 1 : nb_channels) * showwaves->n);
    if (outlink->format == AV_PIX_FMT_RGBA) {
        uint8_t fg[4] = { 0xff, 0xff, 0xff, 0xff };

        for (ch = 0; ch < nb_channels; ch++) {
            char *color;

            color = av_strtok(ch == 0 ? colors : NULL, " |", &saveptr);
            if (color)
                av_parse_color(fg, color, -1, ctx);
            showwaves->fg[4*ch + 0] = fg[0] * x / 255.;
            showwaves->fg[4*ch + 1] = fg[1] * x / 255.;
            showwaves->fg[4*ch + 2] = fg[2] * x / 255.;
            showwaves->fg[4*ch + 3] = fg[3] * x / 255.;
        }
    } else {
        for (ch = 0; ch < nb_channels; ch++)
            showwaves->fg[4 * ch + 0] = x;
    }
    av_free(colors);

    return 0;
}

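/* Send the current output picture downstream and reset the per-column drawing
 * state so the next picture starts from a clean buffer. */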
inline static int push_frame(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    ShowWavesContext *showwaves = outlink->src->priv;
    int nb_channels = inlink->channels;
    int ret, i;

    ret = ff_filter_frame(outlink, showwaves->outpicref);
    showwaves->outpicref = NULL;
    showwaves->buf_idx = 0;
    for (i = 0; i < nb_channels; i++)
        showwaves->buf_idy[i] = 0;
    return ret;
}

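/* showwavespic path: walk the queued audio frames, average the absolute sample
 * values that fall into each output column, draw one value per channel per
 * column, then emit the single output picture. */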
static int push_single_pic(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    int64_t n = 0, max_samples = showwaves->total_samples / outlink->w;
    AVFrame *out = showwaves->outpicref;
    struct frame_node *node;
    const int nb_channels = inlink->channels;
    const int ch_height = showwaves->split_channels ? outlink->h / nb_channels : outlink->h;
    const int linesize = out->linesize[0];
    const int pixstep = showwaves->pixstep;
    int col = 0;
    int64_t *sum = showwaves->sum;

    if (max_samples == 0) {
        av_log(ctx, AV_LOG_ERROR, "Too few samples\n");
        return AVERROR(EINVAL);
    }

    av_log(ctx, AV_LOG_DEBUG, "Create frame averaging %"PRId64" samples per column\n", max_samples);

    memset(sum, 0, nb_channels * sizeof(*sum));

    for (node = showwaves->audio_frames; node; node = node->next) {
        int i;
        const AVFrame *frame = node->frame;
        const int16_t *p = (const int16_t *)frame->data[0];

        for (i = 0; i < frame->nb_samples; i++) {
            int ch;

            for (ch = 0; ch < nb_channels; ch++)
                sum[ch] += abs(p[ch + i*nb_channels]) << 1;
            if (n++ == max_samples) {
                for (ch = 0; ch < nb_channels; ch++) {
                    int16_t sample = sum[ch] / max_samples;
                    uint8_t *buf = out->data[0] + col * pixstep;
                    int h;

                    if (showwaves->split_channels)
                        buf += ch*ch_height*linesize;
                    av_assert0(col < outlink->w);
                    h = showwaves->get_h(sample, ch_height);
                    showwaves->draw_sample(buf, ch_height, linesize, &showwaves->buf_idy[ch], &showwaves->fg[ch * 4], h);
                    sum[ch] = 0;
                }
                col++;
                n = 0;
            }
        }
    }

    return push_frame(outlink);
}

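/* Pull audio from the input; on EOF, flush whatever has been buffered as either
 * the final partial frame (showwaves) or the single picture (showwavespic). */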
static int request_frame(AVFilterLink *outlink)
{
    ShowWavesContext *showwaves = outlink->src->priv;
    AVFilterLink *inlink = outlink->src->inputs[0];
    int ret;

    ret = ff_request_frame(inlink);
    if (ret == AVERROR_EOF && showwaves->outpicref) {
        if (showwaves->single_pic)
            push_single_pic(outlink);
        else
            push_frame(outlink);
    }

    return ret;
}

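/* Lazily allocate and clear the output picture; its PTS is derived from the
 * input frame PTS plus the offset of the current sample within that frame. */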
static int alloc_out_frame(ShowWavesContext *showwaves, const int16_t *p,
                           const AVFilterLink *inlink, AVFilterLink *outlink,
                           const AVFrame *in)
{
    if (!showwaves->outpicref) {
        int j;
        AVFrame *out = showwaves->outpicref =
            ff_get_video_buffer(outlink, outlink->w, outlink->h);
        if (!out)
            return AVERROR(ENOMEM);
        out->width  = outlink->w;
        out->height = outlink->h;
        out->pts = in->pts + av_rescale_q((p - (int16_t *)in->data[0]) / inlink->channels,
                                          av_make_q(1, inlink->sample_rate),
                                          outlink->time_base);
        for (j = 0; j < outlink->h; j++)
            memset(out->data[0] + j*out->linesize[0], 0, outlink->w * showwaves->pixstep);
    }
    return 0;
}

static av_cold int init(AVFilterContext *ctx)
{
    ShowWavesContext *showwaves = ctx->priv;

    if (!strcmp(ctx->filter->name, "showwavespic")) {
        showwaves->single_pic = 1;
        showwaves->mode = MODE_CENTERED_LINE;
    }

    return 0;
}

#if CONFIG_SHOWWAVES_FILTER

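/* showwaves filter_frame: draw every input sample into the current output
 * picture, advancing one output column every "n" samples, and push a frame
 * downstream each time the picture is full. */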
static int showwaves_filter_frame(AVFilterLink *inlink, AVFrame *insamples)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    const int nb_samples = insamples->nb_samples;
    AVFrame *outpicref = showwaves->outpicref;
    int16_t *p = (int16_t *)insamples->data[0];
    int nb_channels = inlink->channels;
    int i, j, ret = 0;
    const int pixstep = showwaves->pixstep;
    const int n = showwaves->n;
    const int ch_height = showwaves->split_channels ? outlink->h / nb_channels : outlink->h;

    /* draw data in the buffer */
    for (i = 0; i < nb_samples; i++) {

        ret = alloc_out_frame(showwaves, p, inlink, outlink, insamples);
        if (ret < 0)
            goto end;
        outpicref = showwaves->outpicref;

        for (j = 0; j < nb_channels; j++) {
            uint8_t *buf = outpicref->data[0] + showwaves->buf_idx * pixstep;
            const int linesize = outpicref->linesize[0];
            int h;

            if (showwaves->split_channels)
                buf += j*ch_height*linesize;
            h = showwaves->get_h(*p++, ch_height);
            showwaves->draw_sample(buf, ch_height, linesize,
                                   &showwaves->buf_idy[j], &showwaves->fg[j * 4], h);
        }

        showwaves->sample_count_mod++;
        if (showwaves->sample_count_mod == n) {
            showwaves->sample_count_mod = 0;
            showwaves->buf_idx++;
        }
        if (showwaves->buf_idx == showwaves->w)
            if ((ret = push_frame(outlink)) < 0)
                break;
        outpicref = showwaves->outpicref;
    }

end:
    av_frame_free(&insamples);
    return ret;
}

static const AVFilterPad showwaves_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_AUDIO,
        .filter_frame = showwaves_filter_frame,
    },
    { NULL }
};

static const AVFilterPad showwaves_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,
        .request_frame = request_frame,
    },
    { NULL }
};

AVFilter ff_avf_showwaves = {
    .name          = "showwaves",
    .description   = NULL_IF_CONFIG_SMALL("Convert input audio to a video output."),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,
    .priv_size     = sizeof(ShowWavesContext),
    .inputs        = showwaves_inputs,
    .outputs       = showwaves_outputs,
    .priv_class    = &showwaves_class,
};

#endif // CONFIG_SHOWWAVES_FILTER

#if CONFIG_SHOWWAVESPIC_FILTER

#define OFFSET(x) offsetof(ShowWavesContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption showwavespic_options[] = {
    { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "s",    "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "split_channels", "draw channels separately", OFFSET(split_channels), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
    { "colors", "set channels colors", OFFSET(colors), AV_OPT_TYPE_STRING, {.str = "red|green|blue|yellow|orange|lime|pink|magenta|brown" }, 0, 0, FLAGS },
    { "scale", "set amplitude scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, SCALE_NB-1, FLAGS, .unit="scale" },
        { "lin",  "linear",      0, AV_OPT_TYPE_CONST, {.i64=SCALE_LIN},  .flags=FLAGS, .unit="scale"},
        { "log",  "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_LOG},  .flags=FLAGS, .unit="scale"},
        { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_SQRT}, .flags=FLAGS, .unit="scale"},
        { "cbrt", "cubic root",  0, AV_OPT_TYPE_CONST, {.i64=SCALE_CBRT}, .flags=FLAGS, .unit="scale"},
    { NULL }
};

AVFILTER_DEFINE_CLASS(showwavespic);

static int showwavespic_config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    ShowWavesContext *showwaves = ctx->priv;

    if (showwaves->single_pic) {
        showwaves->sum = av_mallocz_array(inlink->channels, sizeof(*showwaves->sum));
        if (!showwaves->sum)
            return AVERROR(ENOMEM);
    }

    return 0;
}

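/* showwavespic filter_frame: nothing is drawn yet; the audio frames are queued
 * and counted so that push_single_pic() can render them all at EOF. */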
static int showwavespic_filter_frame(AVFilterLink *inlink, AVFrame *insamples)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    int16_t *p = (int16_t *)insamples->data[0];
    int ret = 0;

    if (showwaves->single_pic) {
        struct frame_node *f;

        ret = alloc_out_frame(showwaves, p, inlink, outlink, insamples);
        if (ret < 0)
            goto end;

        /* queue the audio frame */
        f = av_malloc(sizeof(*f));
        if (!f) {
            ret = AVERROR(ENOMEM);
            goto end;
        }
        f->frame = insamples;
        f->next  = NULL;
        if (!showwaves->last_frame) {
            showwaves->audio_frames =
            showwaves->last_frame   = f;
        } else {
            showwaves->last_frame->next = f;
            showwaves->last_frame = f;
        }
        showwaves->total_samples += insamples->nb_samples;

        return 0;
    }

end:
    av_frame_free(&insamples);
    return ret;
}

static const AVFilterPad showwavespic_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_AUDIO,
        .config_props = showwavespic_config_input,
        .filter_frame = showwavespic_filter_frame,
    },
    { NULL }
};

static const AVFilterPad showwavespic_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,
        .request_frame = request_frame,
    },
    { NULL }
};

AVFilter ff_avf_showwavespic = {
    .name          = "showwavespic",
    .description   = NULL_IF_CONFIG_SMALL("Convert input audio to a video output single picture."),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,
    .priv_size     = sizeof(ShowWavesContext),
    .inputs        = showwavespic_inputs,
    .outputs       = showwavespic_outputs,
    .priv_class    = &showwavespic_class,
};

#endif // CONFIG_SHOWWAVESPIC_FILTER