avf_showspectrum.c
1 /*
2  * Copyright (c) 2012-2013 Clément Bœsch
3  * Copyright (c) 2013 Rudolf Polzer <divverent@xonotic.org>
4  * Copyright (c) 2015 Paul B Mahol
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * audio to spectrum (video) transmedia filter, based on ffplay rdft showmode
26  * (by Michael Niedermayer) and lavfi/avf_showwaves (by Stefano Sabatini).
27  */
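/*
 * Example (hypothetical command line; option names and values match the
 * AVOption table below):
 *   ffmpeg -i input.wav -filter_complex \
 *     "showspectrum=s=1280x720:mode=separate:color=intensity:scale=log:slide=scroll" \
 *     -c:v libx264 spectrum.mp4
 */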
28 
29 #include <math.h>
30 
31 #include "libavcodec/avfft.h"
32 #include "libavutil/audio_fifo.h"
33 #include "libavutil/avassert.h"
34 #include "libavutil/avstring.h"
35 #include "libavutil/channel_layout.h"
36 #include "libavutil/opt.h"
37 #include "libavutil/parseutils.h"
38 #include "libavutil/xga_font_data.h"
39 #include "audio.h"
40 #include "video.h"
41 #include "avfilter.h"
42 #include "filters.h"
43 #include "internal.h"
44 #include "window_func.h"
45 
46 enum DisplayMode    { COMBINED, SEPARATE, NB_MODES };
47 enum DataMode       { D_MAGNITUDE, D_PHASE, NB_DMODES };
48 enum FrequencyScale { F_LINEAR, F_LOG, NB_FSCALES };
49 enum DisplayScale   { LINEAR, SQRT, CBRT, LOG, FOURTHRT, FIFTHRT, NB_SCALES };
50 enum ColorMode      { CHANNEL, INTENSITY, RAINBOW, MORELAND, NEBULAE, FIRE, FIERY, FRUIT, COOL, MAGMA, GREEN, VIRIDIS, PLASMA, CIVIDIS, TERRAIN, NB_CLMODES };
51 enum SlideMode      { REPLACE, SCROLL, FULLFRAME, RSCROLL, NB_SLIDES };
52 enum Orientation    { VERTICAL, HORIZONTAL, NB_ORIENTATIONS };
53 
54 typedef struct ShowSpectrumContext {
55  const AVClass *class;
56  int w, h;
57  char *rate_str;
58  AVRational auto_frame_rate;
59  AVRational frame_rate;
60  AVFrame *outpicref;
61  int nb_display_channels;
62  int orientation;
63  int channel_width;
64  int channel_height;
65  int sliding; ///< 1 if sliding mode, 0 otherwise
66  int mode; ///< channel display mode
67  int color_mode; ///< display color scheme
68  int scale;
69  int fscale;
70  float saturation; ///< color saturation multiplier
71  float rotation; ///< color rotation
72  int start, stop; ///< zoom mode
73  int data;
74  int xpos; ///< x position (current column)
75  FFTContext **fft; ///< Fast Fourier Transform context
76  FFTContext **ifft; ///< Inverse Fast Fourier Transform context
77  int fft_bits; ///< number of bits (FFT window size = 1<<fft_bits)
78  FFTComplex **fft_data; ///< bins holder for each (displayed) channels
79  FFTComplex **fft_scratch; ///< scratch buffers
80  float *window_func_lut; ///< Window function LUT
81  float **magnitudes;
82  float **phases;
83  int win_func;
84  int win_size;
85  int buf_size;
86  double win_scale;
87  float overlap;
88  float gain;
89  int consumed;
90  int hop_size;
91  float *combine_buffer; ///< color combining buffer (3 * h items)
92  float **color_buffer; ///< color buffer (3 * h * ch items)
93  AVAudioFifo *fifo;
94  int64_t pts;
95  int64_t old_pts;
96  int old_len;
97  int single_pic;
98  int legend;
99  int start_x, start_y;
100  int (*plot_channel)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
101 } ShowSpectrumContext;
102 
103 #define OFFSET(x) offsetof(ShowSpectrumContext, x)
104 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
105 
106 static const AVOption showspectrum_options[] = {
107  { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "640x512"}, 0, 0, FLAGS },
108  { "s", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "640x512"}, 0, 0, FLAGS },
109  { "slide", "set sliding mode", OFFSET(sliding), AV_OPT_TYPE_INT, {.i64 = 0}, 0, NB_SLIDES-1, FLAGS, "slide" },
110  { "replace", "replace old columns with new", 0, AV_OPT_TYPE_CONST, {.i64=REPLACE}, 0, 0, FLAGS, "slide" },
111  { "scroll", "scroll from right to left", 0, AV_OPT_TYPE_CONST, {.i64=SCROLL}, 0, 0, FLAGS, "slide" },
112  { "fullframe", "return full frames", 0, AV_OPT_TYPE_CONST, {.i64=FULLFRAME}, 0, 0, FLAGS, "slide" },
113  { "rscroll", "scroll from left to right", 0, AV_OPT_TYPE_CONST, {.i64=RSCROLL}, 0, 0, FLAGS, "slide" },
114  { "mode", "set channel display mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=COMBINED}, COMBINED, NB_MODES-1, FLAGS, "mode" },
115  { "combined", "combined mode", 0, AV_OPT_TYPE_CONST, {.i64=COMBINED}, 0, 0, FLAGS, "mode" },
116  { "separate", "separate mode", 0, AV_OPT_TYPE_CONST, {.i64=SEPARATE}, 0, 0, FLAGS, "mode" },
117  { "color", "set channel coloring", OFFSET(color_mode), AV_OPT_TYPE_INT, {.i64=CHANNEL}, CHANNEL, NB_CLMODES-1, FLAGS, "color" },
118  { "channel", "separate color for each channel", 0, AV_OPT_TYPE_CONST, {.i64=CHANNEL}, 0, 0, FLAGS, "color" },
119  { "intensity", "intensity based coloring", 0, AV_OPT_TYPE_CONST, {.i64=INTENSITY}, 0, 0, FLAGS, "color" },
120  { "rainbow", "rainbow based coloring", 0, AV_OPT_TYPE_CONST, {.i64=RAINBOW}, 0, 0, FLAGS, "color" },
121  { "moreland", "moreland based coloring", 0, AV_OPT_TYPE_CONST, {.i64=MORELAND}, 0, 0, FLAGS, "color" },
122  { "nebulae", "nebulae based coloring", 0, AV_OPT_TYPE_CONST, {.i64=NEBULAE}, 0, 0, FLAGS, "color" },
123  { "fire", "fire based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FIRE}, 0, 0, FLAGS, "color" },
124  { "fiery", "fiery based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FIERY}, 0, 0, FLAGS, "color" },
125  { "fruit", "fruit based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FRUIT}, 0, 0, FLAGS, "color" },
126  { "cool", "cool based coloring", 0, AV_OPT_TYPE_CONST, {.i64=COOL}, 0, 0, FLAGS, "color" },
127  { "magma", "magma based coloring", 0, AV_OPT_TYPE_CONST, {.i64=MAGMA}, 0, 0, FLAGS, "color" },
128  { "green", "green based coloring", 0, AV_OPT_TYPE_CONST, {.i64=GREEN}, 0, 0, FLAGS, "color" },
129  { "viridis", "viridis based coloring", 0, AV_OPT_TYPE_CONST, {.i64=VIRIDIS}, 0, 0, FLAGS, "color" },
130  { "plasma", "plasma based coloring", 0, AV_OPT_TYPE_CONST, {.i64=PLASMA}, 0, 0, FLAGS, "color" },
131  { "cividis", "cividis based coloring", 0, AV_OPT_TYPE_CONST, {.i64=CIVIDIS}, 0, 0, FLAGS, "color" },
132  { "terrain", "terrain based coloring", 0, AV_OPT_TYPE_CONST, {.i64=TERRAIN}, 0, 0, FLAGS, "color" },
133  { "scale", "set display scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64=SQRT}, LINEAR, NB_SCALES-1, FLAGS, "scale" },
134  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=LINEAR}, 0, 0, FLAGS, "scale" },
135  { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SQRT}, 0, 0, FLAGS, "scale" },
136  { "cbrt", "cubic root", 0, AV_OPT_TYPE_CONST, {.i64=CBRT}, 0, 0, FLAGS, "scale" },
137  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=LOG}, 0, 0, FLAGS, "scale" },
138  { "4thrt","4th root", 0, AV_OPT_TYPE_CONST, {.i64=FOURTHRT}, 0, 0, FLAGS, "scale" },
139  { "5thrt","5th root", 0, AV_OPT_TYPE_CONST, {.i64=FIFTHRT}, 0, 0, FLAGS, "scale" },
140  { "fscale", "set frequency scale", OFFSET(fscale), AV_OPT_TYPE_INT, {.i64=F_LINEAR}, 0, NB_FSCALES-1, FLAGS, "fscale" },
141  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=F_LINEAR}, 0, 0, FLAGS, "fscale" },
142  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=F_LOG}, 0, 0, FLAGS, "fscale" },
143  { "saturation", "color saturation multiplier", OFFSET(saturation), AV_OPT_TYPE_FLOAT, {.dbl = 1}, -10, 10, FLAGS },
144  { "win_func", "set window function", OFFSET(win_func), AV_OPT_TYPE_INT, {.i64 = WFUNC_HANNING}, 0, NB_WFUNC-1, FLAGS, "win_func" },
145  { "rect", "Rectangular", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_RECT}, 0, 0, FLAGS, "win_func" },
146  { "bartlett", "Bartlett", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BARTLETT}, 0, 0, FLAGS, "win_func" },
147  { "hann", "Hann", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_HANNING}, 0, 0, FLAGS, "win_func" },
148  { "hanning", "Hanning", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_HANNING}, 0, 0, FLAGS, "win_func" },
149  { "hamming", "Hamming", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_HAMMING}, 0, 0, FLAGS, "win_func" },
150  { "blackman", "Blackman", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BLACKMAN}, 0, 0, FLAGS, "win_func" },
151  { "welch", "Welch", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_WELCH}, 0, 0, FLAGS, "win_func" },
152  { "flattop", "Flat-top", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_FLATTOP}, 0, 0, FLAGS, "win_func" },
153  { "bharris", "Blackman-Harris", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BHARRIS}, 0, 0, FLAGS, "win_func" },
154  { "bnuttall", "Blackman-Nuttall", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BNUTTALL}, 0, 0, FLAGS, "win_func" },
155  { "bhann", "Bartlett-Hann", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BHANN}, 0, 0, FLAGS, "win_func" },
156  { "sine", "Sine", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_SINE}, 0, 0, FLAGS, "win_func" },
157  { "nuttall", "Nuttall", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_NUTTALL}, 0, 0, FLAGS, "win_func" },
158  { "lanczos", "Lanczos", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_LANCZOS}, 0, 0, FLAGS, "win_func" },
159  { "gauss", "Gauss", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_GAUSS}, 0, 0, FLAGS, "win_func" },
160  { "tukey", "Tukey", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_TUKEY}, 0, 0, FLAGS, "win_func" },
161  { "dolph", "Dolph-Chebyshev", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_DOLPH}, 0, 0, FLAGS, "win_func" },
162  { "cauchy", "Cauchy", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_CAUCHY}, 0, 0, FLAGS, "win_func" },
163  { "parzen", "Parzen", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_PARZEN}, 0, 0, FLAGS, "win_func" },
164  { "poisson", "Poisson", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_POISSON}, 0, 0, FLAGS, "win_func" },
165  { "bohman", "Bohman", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BOHMAN}, 0, 0, FLAGS, "win_func" },
166  { "orientation", "set orientation", OFFSET(orientation), AV_OPT_TYPE_INT, {.i64=VERTICAL}, 0, NB_ORIENTATIONS-1, FLAGS, "orientation" },
167  { "vertical", NULL, 0, AV_OPT_TYPE_CONST, {.i64=VERTICAL}, 0, 0, FLAGS, "orientation" },
168  { "horizontal", NULL, 0, AV_OPT_TYPE_CONST, {.i64=HORIZONTAL}, 0, 0, FLAGS, "orientation" },
169  { "overlap", "set window overlap", OFFSET(overlap), AV_OPT_TYPE_FLOAT, {.dbl = 0}, 0, 1, FLAGS },
170  { "gain", "set scale gain", OFFSET(gain), AV_OPT_TYPE_FLOAT, {.dbl = 1}, 0, 128, FLAGS },
171  { "data", "set data mode", OFFSET(data), AV_OPT_TYPE_INT, {.i64 = 0}, 0, NB_DMODES-1, FLAGS, "data" },
172  { "magnitude", NULL, 0, AV_OPT_TYPE_CONST, {.i64=D_MAGNITUDE}, 0, 0, FLAGS, "data" },
173  { "phase", NULL, 0, AV_OPT_TYPE_CONST, {.i64=D_PHASE}, 0, 0, FLAGS, "data" },
174  { "rotation", "color rotation", OFFSET(rotation), AV_OPT_TYPE_FLOAT, {.dbl = 0}, -1, 1, FLAGS },
175  { "start", "start frequency", OFFSET(start), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT32_MAX, FLAGS },
176  { "stop", "stop frequency", OFFSET(stop), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT32_MAX, FLAGS },
177  { "fps", "set video rate", OFFSET(rate_str), AV_OPT_TYPE_STRING, {.str = "auto"}, 0, 0, FLAGS },
178  { "legend", "draw legend", OFFSET(legend), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
179  { NULL }
180 };
181 
182 AVFILTER_DEFINE_CLASS(showspectrum);
183 
184 static const struct ColorTable {
185  float a, y, u, v;
186 } color_table[][8] = {
187  [INTENSITY] = {
188  { 0, 0, 0, 0 },
189  { 0.13, .03587126228984074, .1573300977624594, -.02548747583751842 },
190  { 0.30, .18572281794568020, .1772436246393981, .17475554840414750 },
191  { 0.60, .28184980583656130, -.1593064119945782, .47132074554608920 },
192  { 0.73, .65830621175547810, -.3716070802232764, .24352759331252930 },
193  { 0.78, .76318535758242900, -.4307467689263783, .16866496622310430 },
194  { 0.91, .95336363636363640, -.2045454545454546, .03313636363636363 },
195  { 1, 1, 0, 0 }},
196  [RAINBOW] = {
197  { 0, 0, 0, 0 },
198  { 0.13, 44/256., (189-128)/256., (138-128)/256. },
199  { 0.25, 29/256., (186-128)/256., (119-128)/256. },
200  { 0.38, 119/256., (194-128)/256., (53-128)/256. },
201  { 0.60, 111/256., (73-128)/256., (59-128)/256. },
202  { 0.73, 205/256., (19-128)/256., (149-128)/256. },
203  { 0.86, 135/256., (83-128)/256., (200-128)/256. },
204  { 1, 73/256., (95-128)/256., (225-128)/256. }},
205  [MORELAND] = {
206  { 0, 44/256., (181-128)/256., (112-128)/256. },
207  { 0.13, 126/256., (177-128)/256., (106-128)/256. },
208  { 0.25, 164/256., (163-128)/256., (109-128)/256. },
209  { 0.38, 200/256., (140-128)/256., (120-128)/256. },
210  { 0.60, 201/256., (117-128)/256., (141-128)/256. },
211  { 0.73, 177/256., (103-128)/256., (165-128)/256. },
212  { 0.86, 136/256., (100-128)/256., (183-128)/256. },
213  { 1, 68/256., (117-128)/256., (203-128)/256. }},
214  [NEBULAE] = {
215  { 0, 10/256., (134-128)/256., (132-128)/256. },
216  { 0.23, 21/256., (137-128)/256., (130-128)/256. },
217  { 0.45, 35/256., (134-128)/256., (134-128)/256. },
218  { 0.57, 51/256., (130-128)/256., (139-128)/256. },
219  { 0.67, 104/256., (116-128)/256., (162-128)/256. },
220  { 0.77, 120/256., (105-128)/256., (188-128)/256. },
221  { 0.87, 140/256., (105-128)/256., (188-128)/256. },
222  { 1, 1, 0, 0 }},
223  [FIRE] = {
224  { 0, 0, 0, 0 },
225  { 0.23, 44/256., (132-128)/256., (127-128)/256. },
226  { 0.45, 62/256., (116-128)/256., (140-128)/256. },
227  { 0.57, 75/256., (105-128)/256., (152-128)/256. },
228  { 0.67, 95/256., (91-128)/256., (166-128)/256. },
229  { 0.77, 126/256., (74-128)/256., (172-128)/256. },
230  { 0.87, 164/256., (73-128)/256., (162-128)/256. },
231  { 1, 1, 0, 0 }},
232  [FIERY] = {
233  { 0, 0, 0, 0 },
234  { 0.23, 36/256., (116-128)/256., (163-128)/256. },
235  { 0.45, 52/256., (102-128)/256., (200-128)/256. },
236  { 0.57, 116/256., (84-128)/256., (196-128)/256. },
237  { 0.67, 157/256., (67-128)/256., (181-128)/256. },
238  { 0.77, 193/256., (40-128)/256., (155-128)/256. },
239  { 0.87, 221/256., (101-128)/256., (134-128)/256. },
240  { 1, 1, 0, 0 }},
241  [FRUIT] = {
242  { 0, 0, 0, 0 },
243  { 0.20, 29/256., (136-128)/256., (119-128)/256. },
244  { 0.30, 60/256., (119-128)/256., (90-128)/256. },
245  { 0.40, 85/256., (91-128)/256., (85-128)/256. },
246  { 0.50, 116/256., (70-128)/256., (105-128)/256. },
247  { 0.60, 151/256., (50-128)/256., (146-128)/256. },
248  { 0.70, 191/256., (63-128)/256., (178-128)/256. },
249  { 1, 98/256., (80-128)/256., (221-128)/256. }},
250  [COOL] = {
251  { 0, 0, 0, 0 },
252  { .15, 0, .5, -.5 },
253  { 1, 1, -.5, .5 }},
254  [MAGMA] = {
255  { 0, 0, 0, 0 },
256  { 0.10, 23/256., (175-128)/256., (120-128)/256. },
257  { 0.23, 43/256., (158-128)/256., (144-128)/256. },
258  { 0.35, 85/256., (138-128)/256., (179-128)/256. },
259  { 0.48, 96/256., (128-128)/256., (189-128)/256. },
260  { 0.64, 128/256., (103-128)/256., (214-128)/256. },
261  { 0.92, 205/256., (80-128)/256., (152-128)/256. },
262  { 1, 1, 0, 0 }},
263  [GREEN] = {
264  { 0, 0, 0, 0 },
265  { .75, .5, 0, -.5 },
266  { 1, 1, 0, 0 }},
267  [VIRIDIS] = {
268  { 0, 0, 0, 0 },
269  { 0.10, 0x39/255., (0x9D -128)/255., (0x8F -128)/255. },
270  { 0.23, 0x5C/255., (0x9A -128)/255., (0x68 -128)/255. },
271  { 0.35, 0x69/255., (0x93 -128)/255., (0x57 -128)/255. },
272  { 0.48, 0x76/255., (0x88 -128)/255., (0x4B -128)/255. },
273  { 0.64, 0x8A/255., (0x72 -128)/255., (0x4F -128)/255. },
274  { 0.80, 0xA3/255., (0x50 -128)/255., (0x66 -128)/255. },
275  { 1, 0xCC/255., (0x2F -128)/255., (0x87 -128)/255. }},
276  [PLASMA] = {
277  { 0, 0, 0, 0 },
278  { 0.10, 0x27/255., (0xC2 -128)/255., (0x82 -128)/255. },
279  { 0.58, 0x5B/255., (0x9A -128)/255., (0xAE -128)/255. },
280  { 0.70, 0x89/255., (0x44 -128)/255., (0xAB -128)/255. },
281  { 0.80, 0xB4/255., (0x2B -128)/255., (0x9E -128)/255. },
282  { 0.91, 0xD2/255., (0x38 -128)/255., (0x92 -128)/255. },
283  { 1, 1, 0, 0. }},
284  [CIVIDIS] = {
285  { 0, 0, 0, 0 },
286  { 0.20, 0x28/255., (0x98 -128)/255., (0x6F -128)/255. },
287  { 0.50, 0x48/255., (0x95 -128)/255., (0x74 -128)/255. },
288  { 0.63, 0x69/255., (0x84 -128)/255., (0x7F -128)/255. },
289  { 0.76, 0x89/255., (0x75 -128)/255., (0x84 -128)/255. },
290  { 0.90, 0xCE/255., (0x35 -128)/255., (0x95 -128)/255. },
291  { 1, 1, 0, 0. }},
292  [TERRAIN] = {
293  { 0, 0, 0, 0 },
294  { 0.15, 0, .5, 0 },
295  { 0.60, 1, -.5, -.5 },
296  { 0.85, 1, -.5, .5 },
297  { 1, 1, 0, 0 }},
298 };
299 
300 static av_cold void uninit(AVFilterContext *ctx)
301 {
302  ShowSpectrumContext *s = ctx->priv;
303  int i;
304 
305  av_freep(&s->combine_buffer);
306  if (s->fft) {
307  for (i = 0; i < s->nb_display_channels; i++)
308  av_fft_end(s->fft[i]);
309  }
310  av_freep(&s->fft);
311  if (s->ifft) {
312  for (i = 0; i < s->nb_display_channels; i++)
313  av_fft_end(s->ifft[i]);
314  }
315  av_freep(&s->ifft);
316  if (s->fft_data) {
317  for (i = 0; i < s->nb_display_channels; i++)
318  av_freep(&s->fft_data[i]);
319  }
320  av_freep(&s->fft_data);
321  if (s->fft_scratch) {
322  for (i = 0; i < s->nb_display_channels; i++)
323  av_freep(&s->fft_scratch[i]);
324  }
325  av_freep(&s->fft_scratch);
326  if (s->color_buffer) {
327  for (i = 0; i < s->nb_display_channels; i++)
328  av_freep(&s->color_buffer[i]);
329  }
330  av_freep(&s->color_buffer);
331  av_freep(&s->window_func_lut);
332  if (s->magnitudes) {
333  for (i = 0; i < s->nb_display_channels; i++)
334  av_freep(&s->magnitudes[i]);
335  }
336  av_freep(&s->magnitudes);
337  av_frame_free(&s->outpicref);
338  av_audio_fifo_free(s->fifo);
339  if (s->phases) {
340  for (i = 0; i < s->nb_display_channels; i++)
341  av_freep(&s->phases[i]);
342  }
343  av_freep(&s->phases);
344 }
345 
346 static int query_formats(AVFilterContext *ctx)
347 {
348  AVFilterFormats *formats = NULL;
349  AVFilterChannelLayouts *layouts = NULL;
350  AVFilterLink *inlink = ctx->inputs[0];
351  AVFilterLink *outlink = ctx->outputs[0];
352  static const enum AVSampleFormat sample_fmts[] = { AV_SAMPLE_FMT_FLTP, AV_SAMPLE_FMT_NONE };
353  static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV444P, AV_PIX_FMT_NONE };
354  int ret;
355 
356  /* set input audio formats */
357  formats = ff_make_format_list(sample_fmts);
358  if ((ret = ff_formats_ref(formats, &inlink->outcfg.formats)) < 0)
359  return ret;
360 
361  layouts = ff_all_channel_layouts();
362  if ((ret = ff_channel_layouts_ref(layouts, &inlink->outcfg.channel_layouts)) < 0)
363  return ret;
364 
365  formats = ff_all_samplerates();
366  if ((ret = ff_formats_ref(formats, &inlink->outcfg.samplerates)) < 0)
367  return ret;
368 
369  /* set output video format */
370  formats = ff_make_format_list(pix_fmts);
371  if ((ret = ff_formats_ref(formats, &outlink->incfg.formats)) < 0)
372  return ret;
373 
374  return 0;
375 }
376 
377 static int run_channel_fft(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
378 {
379  ShowSpectrumContext *s = ctx->priv;
380  AVFilterLink *inlink = ctx->inputs[0];
381  const float *window_func_lut = s->window_func_lut;
382  AVFrame *fin = arg;
383  const int ch = jobnr;
384  int n;
385 
386  /* fill FFT input with the number of samples available */
387  const float *p = (float *)fin->extended_data[ch];
388 
389  for (n = 0; n < s->win_size; n++) {
390  s->fft_data[ch][n].re = p[n] * window_func_lut[n];
391  s->fft_data[ch][n].im = 0;
392  }
393 
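 /* Frequency zoom: when s->stop is set, the DFT is evaluated only over the
  * requested [start, stop] band using a chirp-transform style evaluation:
  * the windowed input is pre-multiplied by a complex chirp, convolved with a
  * second chirp via FFT/IFFT, and de-chirped again below. */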
394  if (s->stop) {
395  float theta, phi, psi, a, b, S, c;
396  FFTComplex *g = s->fft_data[ch];
397  FFTComplex *h = s->fft_scratch[ch];
398  int L = s->buf_size;
399  int N = s->win_size;
400  int M = s->win_size / 2;
401 
402  phi = 2.f * M_PI * (s->stop - s->start) / (float)inlink->sample_rate / (M - 1);
403  theta = 2.f * M_PI * s->start / (float)inlink->sample_rate;
404 
405  for (int n = 0; n < M; n++) {
406  h[n].re = cosf(n * n / 2.f * phi);
407  h[n].im = sinf(n * n / 2.f * phi);
408  }
409 
410  for (int n = M; n < L; n++) {
411  h[n].re = 0.f;
412  h[n].im = 0.f;
413  }
414 
415  for (int n = L - N; n < L; n++) {
416  h[n].re = cosf((L - n) * (L - n) / 2.f * phi);
417  h[n].im = sinf((L - n) * (L - n) / 2.f * phi);
418  }
419 
420  for (int n = 0; n < N; n++) {
421  g[n].re = s->fft_data[ch][n].re;
422  g[n].im = s->fft_data[ch][n].im;
423  }
424 
425  for (int n = N; n < L; n++) {
426  g[n].re = 0.f;
427  g[n].im = 0.f;
428  }
429 
430  for (int n = 0; n < N; n++) {
431  psi = n * theta + n * n / 2.f * phi;
432  c = cosf(psi);
433  S = -sinf(psi);
434  a = c * g[n].re - S * g[n].im;
435  b = S * g[n].re + c * g[n].im;
436  g[n].re = a;
437  g[n].im = b;
438  }
439 
440  av_fft_permute(s->fft[ch], h);
441  av_fft_calc(s->fft[ch], h);
442 
443  av_fft_permute(s->fft[ch], g);
444  av_fft_calc(s->fft[ch], g);
445 
446  for (int n = 0; n < L; n++) {
447  c = g[n].re;
448  S = g[n].im;
449  a = c * h[n].re - S * h[n].im;
450  b = S * h[n].re + c * h[n].im;
451 
452  g[n].re = a / L;
453  g[n].im = b / L;
454  }
455 
456  av_fft_permute(s->ifft[ch], g);
457  av_fft_calc(s->ifft[ch], g);
458 
459  for (int k = 0; k < M; k++) {
460  psi = k * k / 2.f * phi;
461  c = cosf(psi);
462  S = -sinf(psi);
463  a = c * g[k].re - S * g[k].im;
464  b = S * g[k].re + c * g[k].im;
465  s->fft_data[ch][k].re = a;
466  s->fft_data[ch][k].im = b;
467  }
468  } else {
469  /* run FFT on each samples set */
470  av_fft_permute(s->fft[ch], s->fft_data[ch]);
471  av_fft_calc(s->fft[ch], s->fft_data[ch]);
472  }
473 
474  return 0;
475 }
476 
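/* Render a text string into the luma plane with the built-in 8x8 CGA bitmap
 * font by inverting the underlying pixels; a non-zero 'o' draws the string
 * rotated by 90 degrees (used for the vertical legend labels). */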
477 static void drawtext(AVFrame *pic, int x, int y, const char *txt, int o)
478 {
479  const uint8_t *font;
480  int font_height;
481  int i;
482 
483  font = avpriv_cga_font, font_height = 8;
484 
485  for (i = 0; txt[i]; i++) {
486  int char_y, mask;
487 
488  if (o) {
489  for (char_y = font_height - 1; char_y >= 0; char_y--) {
490  uint8_t *p = pic->data[0] + (y + i * 10) * pic->linesize[0] + x;
491  for (mask = 0x80; mask; mask >>= 1) {
492  if (font[txt[i] * font_height + font_height - 1 - char_y] & mask)
493  p[char_y] = ~p[char_y];
494  p += pic->linesize[0];
495  }
496  }
497  } else {
498  uint8_t *p = pic->data[0] + y*pic->linesize[0] + (x + i*8);
499  for (char_y = 0; char_y < font_height; char_y++) {
500  for (mask = 0x80; mask; mask >>= 1) {
501  if (font[txt[i] * font_height + char_y] & mask)
502  *p = ~(*p);
503  p++;
504  }
505  p += pic->linesize[0] - 8;
506  }
507  }
508  }
509 }
510 
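/* Compute the Y/U/V scaling factors used for one channel. In COMBINED mode
 * the luma range is split between channels; the CHANNEL color mode further
 * rotates each channel's hue around the UV plane, offset by s->rotation. */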
511 static void color_range(ShowSpectrumContext *s, int ch,
512  float *yf, float *uf, float *vf)
513 {
514  switch (s->mode) {
515  case COMBINED:
516  // reduce range by channel count
517  *yf = 256.0f / s->nb_display_channels;
518  switch (s->color_mode) {
519  case RAINBOW:
520  case MORELAND:
521  case NEBULAE:
522  case FIRE:
523  case FIERY:
524  case FRUIT:
525  case COOL:
526  case GREEN:
527  case VIRIDIS:
528  case PLASMA:
529  case CIVIDIS:
530  case TERRAIN:
531  case MAGMA:
532  case INTENSITY:
533  *uf = *yf;
534  *vf = *yf;
535  break;
536  case CHANNEL:
537  /* adjust saturation for mixed UV coloring */
538  /* this factor is correct for infinite channels, an approximation otherwise */
539  *uf = *yf * M_PI;
540  *vf = *yf * M_PI;
541  break;
542  default:
543  av_assert0(0);
544  }
545  break;
546  case SEPARATE:
547  // full range
548  *yf = 256.0f;
549  *uf = 256.0f;
550  *vf = 256.0f;
551  break;
552  default:
553  av_assert0(0);
554  }
555 
556  if (s->color_mode == CHANNEL) {
557  if (s->nb_display_channels > 1) {
558  *uf *= 0.5f * sinf((2 * M_PI * ch) / s->nb_display_channels + M_PI * s->rotation);
559  *vf *= 0.5f * cosf((2 * M_PI * ch) / s->nb_display_channels + M_PI * s->rotation);
560  } else {
561  *uf *= 0.5f * sinf(M_PI * s->rotation);
562  *vf *= 0.5f * cosf(M_PI * s->rotation + M_PI_2);
563  }
564  } else {
565  *uf += *uf * sinf(M_PI * s->rotation);
566  *vf += *vf * cosf(M_PI * s->rotation + M_PI_2);
567  }
568 
569  *uf *= s->saturation;
570  *vf *= s->saturation;
571 }
572 
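/* Map an intensity a in [0,1] to YUV: palette color modes (> CHANNEL) are
 * piecewise-linearly interpolated from color_table[]; the CHANNEL mode just
 * scales the yf/uf/vf factors by the intensity. */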
573 static void pick_color(ShowSpectrumContext *s,
574  float yf, float uf, float vf,
575  float a, float *out)
576 {
577  if (s->color_mode > CHANNEL) {
578  const int cm = s->color_mode;
579  float y, u, v;
580  int i;
581 
582  for (i = 1; i < FF_ARRAY_ELEMS(color_table[cm]) - 1; i++)
583  if (color_table[cm][i].a >= a)
584  break;
585  // i now is the first item >= the color
586  // now we know to interpolate between item i - 1 and i
587  if (a <= color_table[cm][i - 1].a) {
588  y = color_table[cm][i - 1].y;
589  u = color_table[cm][i - 1].u;
590  v = color_table[cm][i - 1].v;
591  } else if (a >= color_table[cm][i].a) {
592  y = color_table[cm][i].y;
593  u = color_table[cm][i].u;
594  v = color_table[cm][i].v;
595  } else {
596  float start = color_table[cm][i - 1].a;
597  float end = color_table[cm][i].a;
598  float lerpfrac = (a - start) / (end - start);
599  y = color_table[cm][i - 1].y * (1.0f - lerpfrac)
600  + color_table[cm][i].y * lerpfrac;
601  u = color_table[cm][i - 1].u * (1.0f - lerpfrac)
602  + color_table[cm][i].u * lerpfrac;
603  v = color_table[cm][i - 1].v * (1.0f - lerpfrac)
604  + color_table[cm][i].v * lerpfrac;
605  }
606 
607  out[0] = y * yf;
608  out[1] = u * uf;
609  out[2] = v * vf;
610  } else {
611  out[0] = a * yf;
612  out[1] = a * uf;
613  out[2] = a * vf;
614  }
615 }
616 
617 static char *get_time(AVFilterContext *ctx, float seconds, int x)
618 {
619  char *units;
620 
621  if (x == 0)
622  units = av_asprintf("0");
623  else if (log10(seconds) > 6)
624  units = av_asprintf("%.2fh", seconds / (60 * 60));
625  else if (log10(seconds) > 3)
626  units = av_asprintf("%.2fm", seconds / 60);
627  else
628  units = av_asprintf("%.2fs", seconds);
629  return units;
630 }
631 
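/* Logarithmic frequency axis helpers: log_scale()/inv_log_scale() map a
 * frequency within [min, max] through f' = a * exp(b * f), with
 * b = ln(max/min) / (max - min) and a = max / exp(max * b), so that min and
 * max map onto themselves; get_log_hz() and bin_pos() apply this to FFT bin
 * indices, using 21 Hz as the lower bound. */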
632 static float log_scale(const float value, const float min, const float max)
633 {
634  if (value < min)
635  return min;
636  if (value > max)
637  return max;
638 
639  {
640  const float b = logf(max / min) / (max - min);
641  const float a = max / expf(max * b);
642 
643  return expf(value * b) * a;
644  }
645 }
646 
647 static float get_log_hz(const int bin, const int num_bins, const float sample_rate)
648 {
649  const float max_freq = sample_rate / 2;
650  const float hz_per_bin = max_freq / num_bins;
651  const float freq = hz_per_bin * bin;
652  const float scaled_freq = log_scale(freq + 1, 21, max_freq) - 1;
653 
654  return num_bins * scaled_freq / max_freq;
655 }
656 
657 static float inv_log_scale(const float value, const float min, const float max)
658 {
659  if (value < min)
660  return min;
661  if (value > max)
662  return max;
663 
664  {
665  const float b = logf(max / min) / (max - min);
666  const float a = max / expf(max * b);
667 
668  return logf(value / a) / b;
669  }
670 }
671 
672 static float bin_pos(const int bin, const int num_bins, const float sample_rate)
673 {
674  const float max_freq = sample_rate / 2;
675  const float hz_per_bin = max_freq / num_bins;
676  const float freq = hz_per_bin * bin;
677  const float scaled_freq = inv_log_scale(freq + 1, 21, max_freq) - 1;
678 
679  return num_bins * scaled_freq / max_freq;
680 }
681 
682 static int draw_legend(AVFilterContext *ctx, uint64_t samples)
683 {
684  ShowSpectrumContext *s = ctx->priv;
685  AVFilterLink *inlink = ctx->inputs[0];
686  AVFilterLink *outlink = ctx->outputs[0];
687  int ch, y, x = 0, sz = s->orientation == VERTICAL ? s->w : s->h;
688  int multi = (s->mode == SEPARATE && s->color_mode == CHANNEL);
689  float spp = samples / (float)sz;
690  char *text;
691  uint8_t *dst;
692  char chlayout_str[128];
693 
694  av_get_channel_layout_string(chlayout_str, sizeof(chlayout_str), inlink->channels,
695  inlink->channel_layout);
696 
697  text = av_asprintf("%d Hz | %s", inlink->sample_rate, chlayout_str);
698  if (!text)
699  return AVERROR(ENOMEM);
700 
701  drawtext(s->outpicref, 2, outlink->h - 10, "CREATED BY LIBAVFILTER", 0);
702  drawtext(s->outpicref, outlink->w - 2 - strlen(text) * 10, outlink->h - 10, text, 0);
703  av_freep(&text);
704  if (s->stop) {
705  text = av_asprintf("Zoom: %d Hz - %d Hz", s->start, s->stop);
706  if (!text)
707  return AVERROR(ENOMEM);
708  drawtext(s->outpicref, outlink->w - 2 - strlen(text) * 10, 3, text, 0);
709  av_freep(&text);
710  }
711 
712  dst = s->outpicref->data[0] + (s->start_y - 1) * s->outpicref->linesize[0] + s->start_x - 1;
713  for (x = 0; x < s->w + 1; x++)
714  dst[x] = 200;
715  dst = s->outpicref->data[0] + (s->start_y + s->h) * s->outpicref->linesize[0] + s->start_x - 1;
716  for (x = 0; x < s->w + 1; x++)
717  dst[x] = 200;
718  for (y = 0; y < s->h + 2; y++) {
719  dst = s->outpicref->data[0] + (y + s->start_y - 1) * s->outpicref->linesize[0];
720  dst[s->start_x - 1] = 200;
721  dst[s->start_x + s->w] = 200;
722  }
723  if (s->orientation == VERTICAL) {
724  int h = s->mode == SEPARATE ? s->h / s->nb_display_channels : s->h;
725  int hh = s->mode == SEPARATE ? -(s->h % s->nb_display_channels) + 1 : 1;
726  for (ch = 0; ch < (s->mode == SEPARATE ? s->nb_display_channels : 1); ch++) {
727  for (y = 0; y < h; y += 20) {
728  dst = s->outpicref->data[0] + (s->start_y + h * (ch + 1) - y - hh) * s->outpicref->linesize[0];
729  dst[s->start_x - 2] = 200;
730  dst[s->start_x + s->w + 1] = 200;
731  }
732  for (y = 0; y < h; y += 40) {
733  dst = s->outpicref->data[0] + (s->start_y + h * (ch + 1) - y - hh) * s->outpicref->linesize[0];
734  dst[s->start_x - 3] = 200;
735  dst[s->start_x + s->w + 2] = 200;
736  }
737  dst = s->outpicref->data[0] + (s->start_y - 2) * s->outpicref->linesize[0] + s->start_x;
738  for (x = 0; x < s->w; x+=40)
739  dst[x] = 200;
740  dst = s->outpicref->data[0] + (s->start_y - 3) * s->outpicref->linesize[0] + s->start_x;
741  for (x = 0; x < s->w; x+=80)
742  dst[x] = 200;
743  dst = s->outpicref->data[0] + (s->h + s->start_y + 1) * s->outpicref->linesize[0] + s->start_x;
744  for (x = 0; x < s->w; x+=40) {
745  dst[x] = 200;
746  }
747  dst = s->outpicref->data[0] + (s->h + s->start_y + 2) * s->outpicref->linesize[0] + s->start_x;
748  for (x = 0; x < s->w; x+=80) {
749  dst[x] = 200;
750  }
751  for (y = 0; y < h; y += 40) {
752  float range = s->stop ? s->stop - s->start : inlink->sample_rate / 2;
753  float bin = s->fscale == F_LINEAR ? y : get_log_hz(y, h, inlink->sample_rate);
754  float hertz = s->start + bin * range / (float)(1 << (int)ceil(log2(h)));
755  char *units;
756 
757  if (hertz == 0)
758  units = av_asprintf("DC");
759  else
760  units = av_asprintf("%.2f", hertz);
761  if (!units)
762  return AVERROR(ENOMEM);
763 
764  drawtext(s->outpicref, s->start_x - 8 * strlen(units) - 4, h * (ch + 1) + s->start_y - y - 4 - hh, units, 0);
765  av_free(units);
766  }
767  }
768 
769  for (x = 0; x < s->w && s->single_pic; x+=80) {
770  float seconds = x * spp / inlink->sample_rate;
771  char *units = get_time(ctx, seconds, x);
772  if (!units)
773  return AVERROR(ENOMEM);
774 
775  drawtext(s->outpicref, s->start_x + x - 4 * strlen(units), s->h + s->start_y + 6, units, 0);
776  drawtext(s->outpicref, s->start_x + x - 4 * strlen(units), s->start_y - 12, units, 0);
777  av_free(units);
778  }
779 
780  drawtext(s->outpicref, outlink->w / 2 - 4 * 4, outlink->h - s->start_y / 2, "TIME", 0);
781  drawtext(s->outpicref, s->start_x / 7, outlink->h / 2 - 14 * 4, "FREQUENCY (Hz)", 1);
782  } else {
783  int w = s->mode == SEPARATE ? s->w / s->nb_display_channels : s->w;
784  for (y = 0; y < s->h; y += 20) {
785  dst = s->outpicref->data[0] + (s->start_y + y) * s->outpicref->linesize[0];
786  dst[s->start_x - 2] = 200;
787  dst[s->start_x + s->w + 1] = 200;
788  }
789  for (y = 0; y < s->h; y += 40) {
790  dst = s->outpicref->data[0] + (s->start_y + y) * s->outpicref->linesize[0];
791  dst[s->start_x - 3] = 200;
792  dst[s->start_x + s->w + 2] = 200;
793  }
794  for (ch = 0; ch < (s->mode == SEPARATE ? s->nb_display_channels : 1); ch++) {
795  dst = s->outpicref->data[0] + (s->start_y - 2) * s->outpicref->linesize[0] + s->start_x + w * ch;
796  for (x = 0; x < w; x+=40)
797  dst[x] = 200;
798  dst = s->outpicref->data[0] + (s->start_y - 3) * s->outpicref->linesize[0] + s->start_x + w * ch;
799  for (x = 0; x < w; x+=80)
800  dst[x] = 200;
801  dst = s->outpicref->data[0] + (s->h + s->start_y + 1) * s->outpicref->linesize[0] + s->start_x + w * ch;
802  for (x = 0; x < w; x+=40) {
803  dst[x] = 200;
804  }
805  dst = s->outpicref->data[0] + (s->h + s->start_y + 2) * s->outpicref->linesize[0] + s->start_x + w * ch;
806  for (x = 0; x < w; x+=80) {
807  dst[x] = 200;
808  }
809  for (x = 0; x < w - 79; x += 80) {
810  float range = s->stop ? s->stop - s->start : inlink->sample_rate / 2;
811  float bin = s->fscale == F_LINEAR ? x : get_log_hz(x, w, inlink->sample_rate);
812  float hertz = s->start + bin * range / (float)(1 << (int)ceil(log2(w)));
813  char *units;
814 
815  if (hertz == 0)
816  units = av_asprintf("DC");
817  else
818  units = av_asprintf("%.2f", hertz);
819  if (!units)
820  return AVERROR(ENOMEM);
821 
822  drawtext(s->outpicref, s->start_x - 4 * strlen(units) + x + w * ch, s->start_y - 12, units, 0);
823  drawtext(s->outpicref, s->start_x - 4 * strlen(units) + x + w * ch, s->h + s->start_y + 6, units, 0);
824  av_free(units);
825  }
826  }
827  for (y = 0; y < s->h && s->single_pic; y+=40) {
828  float seconds = y * spp / inlink->sample_rate;
829  char *units = get_time(ctx, seconds, x);
830  if (!units)
831  return AVERROR(ENOMEM);
832 
833  drawtext(s->outpicref, s->start_x - 8 * strlen(units) - 4, s->start_y + y - 4, units, 0);
834  av_free(units);
835  }
836  drawtext(s->outpicref, s->start_x / 7, outlink->h / 2 - 4 * 4, "TIME", 1);
837  drawtext(s->outpicref, outlink->w / 2 - 14 * 4, outlink->h - s->start_y / 2, "FREQUENCY (Hz)", 0);
838  }
839 
840  for (ch = 0; ch < (multi ? s->nb_display_channels : 1); ch++) {
841  int h = multi ? s->h / s->nb_display_channels : s->h;
842 
843  for (y = 0; y < h; y++) {
844  float out[3] = { 0., 127.5, 127.5};
845  int chn;
846 
847  for (chn = 0; chn < (s->mode == SEPARATE ? 1 : s->nb_display_channels); chn++) {
848  float yf, uf, vf;
849  int channel = (multi) ? s->nb_display_channels - ch - 1 : chn;
850  float lout[3];
851 
852  color_range(s, channel, &yf, &uf, &vf);
853  pick_color(s, yf, uf, vf, y / (float)h, lout);
854  out[0] += lout[0];
855  out[1] += lout[1];
856  out[2] += lout[2];
857  }
858  memset(s->outpicref->data[0]+(s->start_y + h * (ch + 1) - y - 1) * s->outpicref->linesize[0] + s->w + s->start_x + 20, av_clip_uint8(out[0]), 10);
859  memset(s->outpicref->data[1]+(s->start_y + h * (ch + 1) - y - 1) * s->outpicref->linesize[1] + s->w + s->start_x + 20, av_clip_uint8(out[1]), 10);
860  memset(s->outpicref->data[2]+(s->start_y + h * (ch + 1) - y - 1) * s->outpicref->linesize[2] + s->w + s->start_x + 20, av_clip_uint8(out[2]), 10);
861  }
862 
863  for (y = 0; ch == 0 && y < h; y += h / 10) {
864  float value = 120.f * log10f(1.f - y / (float)h);
865  char *text;
866 
867  if (value < -120)
868  break;
869  text = av_asprintf("%.0f dB", value);
870  if (!text)
871  continue;
872  drawtext(s->outpicref, s->w + s->start_x + 35, s->start_y + y - 5, text, 0);
873  av_free(text);
874  }
875  }
876 
877  return 0;
878 }
879 
880 static float get_value(AVFilterContext *ctx, int ch, int y)
881 {
882  ShowSpectrumContext *s = ctx->priv;
883  float *magnitudes = s->magnitudes[ch];
884  float *phases = s->phases[ch];
885  float a;
886 
887  switch (s->data) {
888  case D_MAGNITUDE:
889  /* get magnitude */
890  a = magnitudes[y];
891  break;
892  case D_PHASE:
893  /* get phase */
894  a = phases[y];
895  break;
896  default:
897  av_assert0(0);
898  }
899 
900  /* apply scale */
901  switch (s->scale) {
902  case LINEAR:
903  a = av_clipf(a, 0, 1);
904  break;
905  case SQRT:
906  a = av_clipf(sqrtf(a), 0, 1);
907  break;
908  case CBRT:
909  a = av_clipf(cbrtf(a), 0, 1);
910  break;
911  case FOURTHRT:
912  a = av_clipf(sqrtf(sqrtf(a)), 0, 1);
913  break;
914  case FIFTHRT:
915  a = av_clipf(powf(a, 0.20), 0, 1);
916  break;
917  case LOG:
918  a = 1.f + log10f(av_clipf(a, 1e-6, 1)) / 6.f; // zero = -120dBFS
919  break;
920  default:
921  av_assert0(0);
922  }
923 
924  return a;
925 }
926 
927 static int plot_channel_lin(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
928 {
929  ShowSpectrumContext *s = ctx->priv;
930  const int h = s->orientation == VERTICAL ? s->channel_height : s->channel_width;
931  const int ch = jobnr;
932  float yf, uf, vf;
933  int y;
934 
935  /* decide color range */
936  color_range(s, ch, &yf, &uf, &vf);
937 
938  /* draw the channel */
939  for (y = 0; y < h; y++) {
940  int row = (s->mode == COMBINED) ? y : ch * h + y;
941  float *out = &s->color_buffer[ch][3 * row];
942  float a = get_value(ctx, ch, y);
943 
944  pick_color(s, yf, uf, vf, a, out);
945  }
946 
947  return 0;
948 }
949 
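/* Logarithmic-frequency variant of plot_channel_lin(): each linear FFT bin
 * is placed at bin_pos() on the log axis and the span up to the next bin is
 * filled by linear interpolation between the two bin values. */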
950 static int plot_channel_log(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
951 {
952  ShowSpectrumContext *s = ctx->priv;
953  AVFilterLink *inlink = ctx->inputs[0];
954  const int h = s->orientation == VERTICAL ? s->channel_height : s->channel_width;
955  const int ch = jobnr;
956  float y, yf, uf, vf;
957  int yy = 0;
958 
959  /* decide color range */
960  color_range(s, ch, &yf, &uf, &vf);
961 
962  /* draw the channel */
963  for (y = 0; y < h && yy < h; yy++) {
964  float pos0 = bin_pos(yy+0, h, inlink->sample_rate);
965  float pos1 = bin_pos(yy+1, h, inlink->sample_rate);
966  float delta = pos1 - pos0;
967  float a0, a1;
968 
969  a0 = get_value(ctx, ch, yy+0);
970  a1 = get_value(ctx, ch, FFMIN(yy+1, h-1));
971  for (float j = pos0; j < pos1 && y + j - pos0 < h; j++) {
972  float row = (s->mode == COMBINED) ? y + j - pos0 : ch * h + y + j - pos0;
973  float *out = &s->color_buffer[ch][3 * FFMIN(lrintf(row), h-1)];
974  float lerpfrac = (j - pos0) / delta;
975 
976  pick_color(s, yf, uf, vf, lerpfrac * a1 + (1.f-lerpfrac) * a0, out);
977  }
978  y += delta;
979  }
980 
981  return 0;
982 }
983 
984 static int config_output(AVFilterLink *outlink)
985 {
986  AVFilterContext *ctx = outlink->src;
987  AVFilterLink *inlink = ctx->inputs[0];
988  ShowSpectrumContext *s = ctx->priv;
989  int i, fft_bits, h, w;
990  float overlap;
991 
992  switch (s->fscale) {
993  case F_LINEAR: s->plot_channel = plot_channel_lin; break;
994  case F_LOG: s->plot_channel = plot_channel_log; break;
995  default: return AVERROR_BUG;
996  }
997 
998  s->stop = FFMIN(s->stop, inlink->sample_rate / 2);
999  if (s->stop && s->stop <= s->start) {
1000  av_log(ctx, AV_LOG_ERROR, "Stop frequency should be greater than start.\n");
1001  return AVERROR(EINVAL);
1002  }
1003 
1004  if (!strcmp(ctx->filter->name, "showspectrumpic"))
1005  s->single_pic = 1;
1006 
1007  outlink->w = s->w;
1008  outlink->h = s->h;
1009  outlink->sample_aspect_ratio = (AVRational){1,1};
1010 
1011  if (s->legend) {
1012  s->start_x = (log10(inlink->sample_rate) + 1) * 25;
1013  s->start_y = 64;
1014  outlink->w += s->start_x * 2;
1015  outlink->h += s->start_y * 2;
1016  }
1017 
1018  h = (s->mode == COMBINED || s->orientation == HORIZONTAL) ? s->h : s->h / inlink->channels;
1019  w = (s->mode == COMBINED || s->orientation == VERTICAL) ? s->w : s->w / inlink->channels;
1020  s->channel_height = h;
1021  s->channel_width = w;
1022 
1023  if (s->orientation == VERTICAL) {
1024  /* FFT window size (precision) according to the requested output frame height */
1025  for (fft_bits = 1; 1 << fft_bits < 2 * h; fft_bits++);
1026  } else {
1027  /* FFT window size (precision) according to the requested output frame width */
1028  for (fft_bits = 1; 1 << fft_bits < 2 * w; fft_bits++);
1029  }
1030 
1031  s->win_size = 1 << fft_bits;
1032  s->buf_size = s->win_size << !!s->stop;
1033 
1034  if (!s->fft) {
1035  s->fft = av_calloc(inlink->channels, sizeof(*s->fft));
1036  if (!s->fft)
1037  return AVERROR(ENOMEM);
1038  }
1039 
1040  if (s->stop) {
1041  if (!s->ifft) {
1042  s->ifft = av_calloc(inlink->channels, sizeof(*s->ifft));
1043  if (!s->ifft)
1044  return AVERROR(ENOMEM);
1045  }
1046  }
1047 
1048  /* (re-)configuration if the video output changed (or first init) */
1049  if (fft_bits != s->fft_bits) {
1050  AVFrame *outpicref;
1051 
1052  s->fft_bits = fft_bits;
1053 
1054  /* FFT buffers: x2 for each (display) channel buffer.
1055  * Note: we use free and malloc instead of a realloc-like function to
1056  * make sure the buffer is aligned in memory for the FFT functions. */
1057  for (i = 0; i < s->nb_display_channels; i++) {
1058  if (s->stop) {
1059  av_fft_end(s->ifft[i]);
1060  av_freep(&s->fft_scratch[i]);
1061  }
1062  av_fft_end(s->fft[i]);
1063  av_freep(&s->fft_data[i]);
1064  }
1065  av_freep(&s->fft_data);
1066 
1067  s->nb_display_channels = inlink->channels;
1068  for (i = 0; i < s->nb_display_channels; i++) {
1069  s->fft[i] = av_fft_init(fft_bits + !!s->stop, 0);
1070  if (s->stop) {
1071  s->ifft[i] = av_fft_init(fft_bits + !!s->stop, 1);
1072  if (!s->ifft[i]) {
1073  av_log(ctx, AV_LOG_ERROR, "Unable to create Inverse FFT context. "
1074  "The window size might be too high.\n");
1075  return AVERROR(EINVAL);
1076  }
1077  }
1078  if (!s->fft[i]) {
1079  av_log(ctx, AV_LOG_ERROR, "Unable to create FFT context. "
1080  "The window size might be too high.\n");
1081  return AVERROR(EINVAL);
1082  }
1083  }
1084 
1085  s->magnitudes = av_calloc(s->nb_display_channels, sizeof(*s->magnitudes));
1086  if (!s->magnitudes)
1087  return AVERROR(ENOMEM);
1088  for (i = 0; i < s->nb_display_channels; i++) {
1089  s->magnitudes[i] = av_calloc(s->orientation == VERTICAL ? s->h : s->w, sizeof(**s->magnitudes));
1090  if (!s->magnitudes[i])
1091  return AVERROR(ENOMEM);
1092  }
1093 
1094  s->phases = av_calloc(s->nb_display_channels, sizeof(*s->phases));
1095  if (!s->phases)
1096  return AVERROR(ENOMEM);
1097  for (i = 0; i < s->nb_display_channels; i++) {
1098  s->phases[i] = av_calloc(s->orientation == VERTICAL ? s->h : s->w, sizeof(**s->phases));
1099  if (!s->phases[i])
1100  return AVERROR(ENOMEM);
1101  }
1102 
1103  av_freep(&s->color_buffer);
1104  s->color_buffer = av_calloc(s->nb_display_channels, sizeof(*s->color_buffer));
1105  if (!s->color_buffer)
1106  return AVERROR(ENOMEM);
1107  for (i = 0; i < s->nb_display_channels; i++) {
1108  s->color_buffer[i] = av_calloc(s->orientation == VERTICAL ? s->h * 3 : s->w * 3, sizeof(**s->color_buffer));
1109  if (!s->color_buffer[i])
1110  return AVERROR(ENOMEM);
1111  }
1112 
1113  s->fft_data = av_calloc(s->nb_display_channels, sizeof(*s->fft_data));
1114  if (!s->fft_data)
1115  return AVERROR(ENOMEM);
1116  s->fft_scratch = av_calloc(s->nb_display_channels, sizeof(*s->fft_scratch));
1117  if (!s->fft_scratch)
1118  return AVERROR(ENOMEM);
1119  for (i = 0; i < s->nb_display_channels; i++) {
1120  s->fft_data[i] = av_calloc(s->buf_size, sizeof(**s->fft_data));
1121  if (!s->fft_data[i])
1122  return AVERROR(ENOMEM);
1123 
1124  s->fft_scratch[i] = av_calloc(s->buf_size, sizeof(**s->fft_scratch));
1125  if (!s->fft_scratch[i])
1126  return AVERROR(ENOMEM);
1127  }
1128 
1129  /* pre-calc windowing function */
1130  s->window_func_lut =
1131  av_realloc_f(s->window_func_lut, s->win_size,
1132  sizeof(*s->window_func_lut));
1133  if (!s->window_func_lut)
1134  return AVERROR(ENOMEM);
1135  generate_window_func(s->window_func_lut, s->win_size, s->win_func, &overlap);
1136  if (s->overlap == 1)
1137  s->overlap = overlap;
1138  s->hop_size = (1.f - s->overlap) * s->win_size;
1139  if (s->hop_size < 1) {
1140  av_log(ctx, AV_LOG_ERROR, "overlap %f too big\n", s->overlap);
1141  return AVERROR(EINVAL);
1142  }
1143 
1144  for (s->win_scale = 0, i = 0; i < s->win_size; i++) {
1145  s->win_scale += s->window_func_lut[i] * s->window_func_lut[i];
1146  }
1147  s->win_scale = 1.f / sqrtf(s->win_scale);
1148 
1149  /* prepare the initial picref buffer (black frame) */
1150  av_frame_free(&s->outpicref);
1151  s->outpicref = outpicref =
1152  ff_get_video_buffer(outlink, outlink->w, outlink->h);
1153  if (!outpicref)
1154  return AVERROR(ENOMEM);
1155  outpicref->sample_aspect_ratio = (AVRational){1,1};
1156  for (i = 0; i < outlink->h; i++) {
1157  memset(outpicref->data[0] + i * outpicref->linesize[0], 0, outlink->w);
1158  memset(outpicref->data[1] + i * outpicref->linesize[1], 128, outlink->w);
1159  memset(outpicref->data[2] + i * outpicref->linesize[2], 128, outlink->w);
1160  }
1161  outpicref->color_range = AVCOL_RANGE_JPEG;
1162 
1163  if (!s->single_pic && s->legend)
1164  draw_legend(ctx, 0);
1165  }
1166 
1167  if ((s->orientation == VERTICAL && s->xpos >= s->w) ||
1168  (s->orientation == HORIZONTAL && s->xpos >= s->h))
1169  s->xpos = 0;
1170 
1171  s->auto_frame_rate = av_make_q(inlink->sample_rate, s->hop_size);
1172  if (s->orientation == VERTICAL && s->sliding == FULLFRAME)
1173  s->auto_frame_rate.den *= s->w;
1174  if (s->orientation == HORIZONTAL && s->sliding == FULLFRAME)
1175  s->auto_frame_rate.den *= s->h;
1176  if (!s->single_pic && strcmp(s->rate_str, "auto")) {
1177  int ret = av_parse_video_rate(&s->frame_rate, s->rate_str);
1178  if (ret < 0)
1179  return ret;
1180  } else {
1181  s->frame_rate = s->auto_frame_rate;
1182  }
1183  outlink->frame_rate = s->frame_rate;
1184  outlink->time_base = av_inv_q(outlink->frame_rate);
1185 
1186  if (s->orientation == VERTICAL) {
1187  s->combine_buffer =
1188  av_realloc_f(s->combine_buffer, s->h * 3,
1189  sizeof(*s->combine_buffer));
1190  } else {
1191  s->combine_buffer =
1192  av_realloc_f(s->combine_buffer, s->w * 3,
1193  sizeof(*s->combine_buffer));
1194  }
1195 
1196  av_log(ctx, AV_LOG_VERBOSE, "s:%dx%d FFT window size:%d\n",
1197  s->w, s->h, s->win_size);
1198 
1199  av_audio_fifo_free(s->fifo);
1200  s->fifo = av_audio_fifo_alloc(inlink->format, inlink->channels, s->win_size);
1201  if (!s->fifo)
1202  return AVERROR(ENOMEM);
1203  return 0;
1204 }
1205 
1206 #define RE(y, ch) s->fft_data[ch][y].re
1207 #define IM(y, ch) s->fft_data[ch][y].im
1208 #define MAGNITUDE(y, ch) hypotf(RE(y, ch), IM(y, ch))
1209 #define PHASE(y, ch) atan2f(IM(y, ch), RE(y, ch))
1210 
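/* Per-channel magnitude/phase extraction. win_scale (1/sqrt of the summed
 * squared window coefficients, see config_output()) normalizes for the
 * analysis window and is squared for the LOG display scale; the result is
 * further multiplied by the user gain. Phases are mapped from [-pi, pi]
 * to [0, 1]. */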
1211 static int calc_channel_magnitudes(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
1212 {
1213  ShowSpectrumContext *s = ctx->priv;
1214  const double w = s->win_scale * (s->scale == LOG ? s->win_scale : 1);
1215  int y, h = s->orientation == VERTICAL ? s->h : s->w;
1216  const float f = s->gain * w;
1217  const int ch = jobnr;
1218  float *magnitudes = s->magnitudes[ch];
1219 
1220  for (y = 0; y < h; y++)
1221  magnitudes[y] = MAGNITUDE(y, ch) * f;
1222 
1223  return 0;
1224 }
1225 
1226 static int calc_channel_phases(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
1227 {
1228  ShowSpectrumContext *s = ctx->priv;
1229  const int h = s->orientation == VERTICAL ? s->h : s->w;
1230  const int ch = jobnr;
1231  float *phases = s->phases[ch];
1232  int y;
1233 
1234  for (y = 0; y < h; y++)
1235  phases[y] = (PHASE(y, ch) / M_PI + 1) / 2;
1236 
1237  return 0;
1238 }
1239 
1240 static void acc_magnitudes(ShowSpectrumContext *s)
1241 {
1242  const double w = s->win_scale * (s->scale == LOG ? s->win_scale : 1);
1243  int ch, y, h = s->orientation == VERTICAL ? s->h : s->w;
1244  const float f = s->gain * w;
1245 
1246  for (ch = 0; ch < s->nb_display_channels; ch++) {
1247  float *magnitudes = s->magnitudes[ch];
1248 
1249  for (y = 0; y < h; y++)
1250  magnitudes[y] += MAGNITUDE(y, ch) * f;
1251  }
1252 }
1253 
1254 static void scale_magnitudes(ShowSpectrumContext *s, float scale)
1255 {
1256  int ch, y, h = s->orientation == VERTICAL ? s->h : s->w;
1257 
1258  for (ch = 0; ch < s->nb_display_channels; ch++) {
1259  float *magnitudes = s->magnitudes[ch];
1260 
1261  for (y = 0; y < h; y++)
1262  magnitudes[y] *= scale;
1263  }
1264 }
1265 
1266 static void clear_combine_buffer(ShowSpectrumContext *s, int size)
1267 {
1268  int y;
1269 
1270  for (y = 0; y < size; y++) {
1271  s->combine_buffer[3 * y ] = 0;
1272  s->combine_buffer[3 * y + 1] = 127.5;
1273  s->combine_buffer[3 * y + 2] = 127.5;
1274  }
1275 }
1276 
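/* Render one spectrum column (row in horizontal orientation): run the
 * per-channel plot_channel callback in parallel, accumulate the per-channel
 * color buffers into combine_buffer and write it at xpos, scrolling the
 * existing picture first in the scroll/rscroll sliding modes. Returns 1 when
 * no frame was output downstream, 0 (or an error) otherwise. */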
1277 static int plot_spectrum_column(AVFilterLink *inlink, AVFrame *insamples)
1278 {
1279  AVFilterContext *ctx = inlink->dst;
1280  AVFilterLink *outlink = ctx->outputs[0];
1281  ShowSpectrumContext *s = ctx->priv;
1282  AVFrame *outpicref = s->outpicref;
1283  int ret, plane, x, y, z = s->orientation == VERTICAL ? s->h : s->w;
1284 
1285  /* fill a new spectrum column */
1286  /* initialize buffer for combining to black */
1287  clear_combine_buffer(s, z);
1288 
1289  ctx->internal->execute(ctx, s->plot_channel, NULL, NULL, s->nb_display_channels);
1290 
1291  for (y = 0; y < z * 3; y++) {
1292  for (x = 0; x < s->nb_display_channels; x++) {
1293  s->combine_buffer[y] += s->color_buffer[x][y];
1294  }
1295  }
1296 
1297  av_frame_make_writable(s->outpicref);
1298  /* copy to output */
1299  if (s->orientation == VERTICAL) {
1300  if (s->sliding == SCROLL) {
1301  for (plane = 0; plane < 3; plane++) {
1302  for (y = 0; y < s->h; y++) {
1303  uint8_t *p = outpicref->data[plane] + s->start_x +
1304  (y + s->start_y) * outpicref->linesize[plane];
1305  memmove(p, p + 1, s->w - 1);
1306  }
1307  }
1308  s->xpos = s->w - 1;
1309  } else if (s->sliding == RSCROLL) {
1310  for (plane = 0; plane < 3; plane++) {
1311  for (y = 0; y < s->h; y++) {
1312  uint8_t *p = outpicref->data[plane] + s->start_x +
1313  (y + s->start_y) * outpicref->linesize[plane];
1314  memmove(p + 1, p, s->w - 1);
1315  }
1316  }
1317  s->xpos = 0;
1318  }
1319  for (plane = 0; plane < 3; plane++) {
1320  uint8_t *p = outpicref->data[plane] + s->start_x +
1321  (outlink->h - 1 - s->start_y) * outpicref->linesize[plane] +
1322  s->xpos;
1323  for (y = 0; y < s->h; y++) {
1324  *p = lrintf(av_clipf(s->combine_buffer[3 * y + plane], 0, 255));
1325  p -= outpicref->linesize[plane];
1326  }
1327  }
1328  } else {
1329  if (s->sliding == SCROLL) {
1330  for (plane = 0; plane < 3; plane++) {
1331  for (y = 1; y < s->h; y++) {
1332  memmove(outpicref->data[plane] + (y-1 + s->start_y) * outpicref->linesize[plane] + s->start_x,
1333  outpicref->data[plane] + (y + s->start_y) * outpicref->linesize[plane] + s->start_x,
1334  s->w);
1335  }
1336  }
1337  s->xpos = s->h - 1;
1338  } else if (s->sliding == RSCROLL) {
1339  for (plane = 0; plane < 3; plane++) {
1340  for (y = s->h - 1; y >= 1; y--) {
1341  memmove(outpicref->data[plane] + (y + s->start_y) * outpicref->linesize[plane] + s->start_x,
1342  outpicref->data[plane] + (y-1 + s->start_y) * outpicref->linesize[plane] + s->start_x,
1343  s->w);
1344  }
1345  }
1346  s->xpos = 0;
1347  }
1348  for (plane = 0; plane < 3; plane++) {
1349  uint8_t *p = outpicref->data[plane] + s->start_x +
1350  (s->xpos + s->start_y) * outpicref->linesize[plane];
1351  for (x = 0; x < s->w; x++) {
1352  *p = lrintf(av_clipf(s->combine_buffer[3 * x + plane], 0, 255));
1353  p++;
1354  }
1355  }
1356  }
1357 
1358  if (s->sliding != FULLFRAME || s->xpos == 0)
1359  outpicref->pts = av_rescale_q(insamples->pts, inlink->time_base, outlink->time_base);
1360 
1361  s->xpos++;
1362  if (s->orientation == VERTICAL && s->xpos >= s->w)
1363  s->xpos = 0;
1364  if (s->orientation == HORIZONTAL && s->xpos >= s->h)
1365  s->xpos = 0;
1366  if (!s->single_pic && (s->sliding != FULLFRAME || s->xpos == 0)) {
1367  if (s->old_pts < outpicref->pts) {
1368  AVFrame *clone;
1369 
1370  if (s->legend) {
1371  char *units = get_time(ctx, insamples->pts /(float)inlink->sample_rate, x);
1372  if (!units)
1373  return AVERROR(ENOMEM);
1374 
1375  if (s->orientation == VERTICAL) {
1376  for (y = 0; y < 10; y++) {
1377  memset(s->outpicref->data[0] + outlink->w / 2 - 4 * s->old_len +
1378  (outlink->h - s->start_y / 2 - 20 + y) * s->outpicref->linesize[0], 0, 10 * s->old_len);
1379  }
1380  drawtext(s->outpicref,
1381  outlink->w / 2 - 4 * strlen(units),
1382  outlink->h - s->start_y / 2 - 20,
1383  units, 0);
1384  } else {
1385  for (y = 0; y < 10 * s->old_len; y++) {
1386  memset(s->outpicref->data[0] + s->start_x / 7 + 20 +
1387  (outlink->h / 2 - 4 * s->old_len + y) * s->outpicref->linesize[0], 0, 10);
1388  }
1389  drawtext(s->outpicref,
1390  s->start_x / 7 + 20,
1391  outlink->h / 2 - 4 * strlen(units),
1392  units, 1);
1393  }
1394  s->old_len = strlen(units);
1395  av_free(units);
1396  }
1397  s->old_pts = outpicref->pts;
1398  clone = av_frame_clone(s->outpicref);
1399  if (!clone)
1400  return AVERROR(ENOMEM);
1401  ret = ff_filter_frame(outlink, clone);
1402  if (ret < 0)
1403  return ret;
1404  return 0;
1405  }
1406  }
1407 
1408  return 1;
1409 }
1410 
1411 #if CONFIG_SHOWSPECTRUM_FILTER
1412 
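/* Filter activation: input audio is buffered in an AVAudioFifo; whenever at
 * least win_size samples are available (or the input hit EOF) one window is
 * peeked, transformed and plotted, then hop_size samples are drained. At EOF
 * with the fullframe sliding mode the partially filled picture is cleared
 * beyond xpos and flushed downstream. */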
1413 static int activate(AVFilterContext *ctx)
1414 {
1415  AVFilterLink *inlink = ctx->inputs[0];
1416  AVFilterLink *outlink = ctx->outputs[0];
1417  ShowSpectrumContext *s = ctx->priv;
1418  int ret;
1419 
1420  FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
1421 
1422  if (av_audio_fifo_size(s->fifo) < s->win_size) {
1423  AVFrame *frame = NULL;
1424 
1425  ret = ff_inlink_consume_frame(inlink, &frame);
1426  if (ret < 0)
1427  return ret;
1428  if (ret > 0) {
1429  s->pts = frame->pts;
1430  s->consumed = 0;
1431 
1432  av_audio_fifo_write(s->fifo, (void **)frame->extended_data, frame->nb_samples);
1433  av_frame_free(&frame);
1434  }
1435  }
1436 
1437  if (s->outpicref && (av_audio_fifo_size(s->fifo) >= s->win_size ||
1438  ff_outlink_get_status(inlink))) {
1439  AVFrame *fin = ff_get_audio_buffer(inlink, s->win_size);
1440  if (!fin)
1441  return AVERROR(ENOMEM);
1442 
1443  fin->pts = s->pts + s->consumed;
1444  s->consumed += s->hop_size;
1445  ret = av_audio_fifo_peek(s->fifo, (void **)fin->extended_data,
1446  FFMIN(s->win_size, av_audio_fifo_size(s->fifo)));
1447  if (ret < 0) {
1448  av_frame_free(&fin);
1449  return ret;
1450  }
1451 
1452  av_assert0(fin->nb_samples == s->win_size);
1453 
1454  ctx->internal->execute(ctx, run_channel_fft, fin, NULL, s->nb_display_channels);
1455 
1456  if (s->data == D_MAGNITUDE)
1457  ctx->internal->execute(ctx, calc_channel_magnitudes, NULL, NULL, s->nb_display_channels);
1458 
1459  if (s->data == D_PHASE)
1460  ctx->internal->execute(ctx, calc_channel_phases, NULL, NULL, s->nb_display_channels);
1461 
1462  ret = plot_spectrum_column(inlink, fin);
1463 
1464  av_frame_free(&fin);
1465  av_audio_fifo_drain(s->fifo, s->hop_size);
1466  if (ret <= 0 && !ff_outlink_get_status(inlink))
1467  return ret;
1468  }
1469 
1470  if (ff_outlink_get_status(inlink) == AVERROR_EOF &&
1471  s->sliding == FULLFRAME &&
1472  s->xpos > 0 && s->outpicref) {
1473  int64_t pts;
1474 
1475  if (s->orientation == VERTICAL) {
1476  for (int i = 0; i < outlink->h; i++) {
1477  memset(s->outpicref->data[0] + i * s->outpicref->linesize[0] + s->xpos, 0, outlink->w - s->xpos);
1478  memset(s->outpicref->data[1] + i * s->outpicref->linesize[1] + s->xpos, 128, outlink->w - s->xpos);
1479  memset(s->outpicref->data[2] + i * s->outpicref->linesize[2] + s->xpos, 128, outlink->w - s->xpos);
1480  }
1481  } else {
1482  for (int i = s->xpos; i < outlink->h; i++) {
1483  memset(s->outpicref->data[0] + i * s->outpicref->linesize[0], 0, outlink->w);
1484  memset(s->outpicref->data[1] + i * s->outpicref->linesize[1], 128, outlink->w);
1485  memset(s->outpicref->data[2] + i * s->outpicref->linesize[2], 128, outlink->w);
1486  }
1487  }
1488  s->outpicref->pts += s->consumed;
1489  pts = s->outpicref->pts;
1490  ret = ff_filter_frame(outlink, s->outpicref);
1491  s->outpicref = NULL;
1492  ff_outlink_set_status(outlink, AVERROR_EOF, pts);
1493  return 0;
1494  }
1495 
1496  FF_FILTER_FORWARD_STATUS(inlink, outlink);
1497  if (av_audio_fifo_size(s->fifo) >= s->win_size ||
1498  ff_outlink_get_status(inlink)) {
1499  ff_filter_set_ready(ctx, 10);
1500  return 0;
1501  }
1502 
1503  if (ff_outlink_frame_wanted(outlink) && av_audio_fifo_size(s->fifo) < s->win_size &&
1504  !ff_outlink_get_status(inlink)) {
1505  ff_inlink_request_frame(inlink);
1506  return 0;
1507  }
1508 
1509  return FFERROR_NOT_READY;
1510 }
1511 
1512 static const AVFilterPad showspectrum_inputs[] = {
1513  {
1514  .name = "default",
1515  .type = AVMEDIA_TYPE_AUDIO,
1516  },
1517  { NULL }
1518 };
1519 
1520 static const AVFilterPad showspectrum_outputs[] = {
1521  {
1522  .name = "default",
1523  .type = AVMEDIA_TYPE_VIDEO,
1524  .config_props = config_output,
1525  },
1526  { NULL }
1527 };
1528 
1529 AVFilter ff_avf_showspectrum = {
1530  .name = "showspectrum",
1531  .description = NULL_IF_CONFIG_SMALL("Convert input audio to a spectrum video output."),
1532  .uninit = uninit,
1533  .query_formats = query_formats,
1534  .priv_size = sizeof(ShowSpectrumContext),
1535  .inputs = showspectrum_inputs,
1536  .outputs = showspectrum_outputs,
1537  .activate = activate,
1538  .priv_class = &showspectrum_class,
1539  .flags = AVFILTER_FLAG_SLICE_THREADS,
1540 };
1541 #endif // CONFIG_SHOWSPECTRUM_FILTER
1542 
1543 #if CONFIG_SHOWSPECTRUMPIC_FILTER
1544 
1545 static const AVOption showspectrumpic_options[] = {
1546  { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "4096x2048"}, 0, 0, FLAGS },
1547  { "s", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "4096x2048"}, 0, 0, FLAGS },
1548  { "mode", "set channel display mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=COMBINED}, 0, NB_MODES-1, FLAGS, "mode" },
1549  { "combined", "combined mode", 0, AV_OPT_TYPE_CONST, {.i64=COMBINED}, 0, 0, FLAGS, "mode" },
1550  { "separate", "separate mode", 0, AV_OPT_TYPE_CONST, {.i64=SEPARATE}, 0, 0, FLAGS, "mode" },
1551  { "color", "set channel coloring", OFFSET(color_mode), AV_OPT_TYPE_INT, {.i64=INTENSITY}, 0, NB_CLMODES-1, FLAGS, "color" },
1552  { "channel", "separate color for each channel", 0, AV_OPT_TYPE_CONST, {.i64=CHANNEL}, 0, 0, FLAGS, "color" },
1553  { "intensity", "intensity based coloring", 0, AV_OPT_TYPE_CONST, {.i64=INTENSITY}, 0, 0, FLAGS, "color" },
1554  { "rainbow", "rainbow based coloring", 0, AV_OPT_TYPE_CONST, {.i64=RAINBOW}, 0, 0, FLAGS, "color" },
1555  { "moreland", "moreland based coloring", 0, AV_OPT_TYPE_CONST, {.i64=MORELAND}, 0, 0, FLAGS, "color" },
1556  { "nebulae", "nebulae based coloring", 0, AV_OPT_TYPE_CONST, {.i64=NEBULAE}, 0, 0, FLAGS, "color" },
1557  { "fire", "fire based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FIRE}, 0, 0, FLAGS, "color" },
1558  { "fiery", "fiery based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FIERY}, 0, 0, FLAGS, "color" },
1559  { "fruit", "fruit based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FRUIT}, 0, 0, FLAGS, "color" },
1560  { "cool", "cool based coloring", 0, AV_OPT_TYPE_CONST, {.i64=COOL}, 0, 0, FLAGS, "color" },
1561  { "magma", "magma based coloring", 0, AV_OPT_TYPE_CONST, {.i64=MAGMA}, 0, 0, FLAGS, "color" },
1562  { "green", "green based coloring", 0, AV_OPT_TYPE_CONST, {.i64=GREEN}, 0, 0, FLAGS, "color" },
1563  { "viridis", "viridis based coloring", 0, AV_OPT_TYPE_CONST, {.i64=VIRIDIS}, 0, 0, FLAGS, "color" },
1564  { "plasma", "plasma based coloring", 0, AV_OPT_TYPE_CONST, {.i64=PLASMA}, 0, 0, FLAGS, "color" },
1565  { "cividis", "cividis based coloring", 0, AV_OPT_TYPE_CONST, {.i64=CIVIDIS}, 0, 0, FLAGS, "color" },
1566  { "terrain", "terrain based coloring", 0, AV_OPT_TYPE_CONST, {.i64=TERRAIN}, 0, 0, FLAGS, "color" },
1567  { "scale", "set display scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64=LOG}, 0, NB_SCALES-1, FLAGS, "scale" },
1568  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=LINEAR}, 0, 0, FLAGS, "scale" },
1569  { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SQRT}, 0, 0, FLAGS, "scale" },
1570  { "cbrt", "cubic root", 0, AV_OPT_TYPE_CONST, {.i64=CBRT}, 0, 0, FLAGS, "scale" },
1571  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=LOG}, 0, 0, FLAGS, "scale" },
1572  { "4thrt","4th root", 0, AV_OPT_TYPE_CONST, {.i64=FOURTHRT}, 0, 0, FLAGS, "scale" },
1573  { "5thrt","5th root", 0, AV_OPT_TYPE_CONST, {.i64=FIFTHRT}, 0, 0, FLAGS, "scale" },
1574  { "fscale", "set frequency scale", OFFSET(fscale), AV_OPT_TYPE_INT, {.i64=F_LINEAR}, 0, NB_FSCALES-1, FLAGS, "fscale" },
1575  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=F_LINEAR}, 0, 0, FLAGS, "fscale" },
1576  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=F_LOG}, 0, 0, FLAGS, "fscale" },
1577  { "saturation", "color saturation multiplier", OFFSET(saturation), AV_OPT_TYPE_FLOAT, {.dbl = 1}, -10, 10, FLAGS },
1578  { "win_func", "set window function", OFFSET(win_func), AV_OPT_TYPE_INT, {.i64 = WFUNC_HANNING}, 0, NB_WFUNC-1, FLAGS, "win_func" },
1579  { "rect", "Rectangular", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_RECT}, 0, 0, FLAGS, "win_func" },
1580  { "bartlett", "Bartlett", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BARTLETT}, 0, 0, FLAGS, "win_func" },
1581  { "hann", "Hann", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_HANNING}, 0, 0, FLAGS, "win_func" },
1582  { "hanning", "Hanning", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_HANNING}, 0, 0, FLAGS, "win_func" },
1583  { "hamming", "Hamming", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_HAMMING}, 0, 0, FLAGS, "win_func" },
1584  { "blackman", "Blackman", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BLACKMAN}, 0, 0, FLAGS, "win_func" },
1585  { "welch", "Welch", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_WELCH}, 0, 0, FLAGS, "win_func" },
1586  { "flattop", "Flat-top", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_FLATTOP}, 0, 0, FLAGS, "win_func" },
1587  { "bharris", "Blackman-Harris", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BHARRIS}, 0, 0, FLAGS, "win_func" },
1588  { "bnuttall", "Blackman-Nuttall", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BNUTTALL}, 0, 0, FLAGS, "win_func" },
1589  { "bhann", "Bartlett-Hann", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BHANN}, 0, 0, FLAGS, "win_func" },
1590  { "sine", "Sine", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_SINE}, 0, 0, FLAGS, "win_func" },
1591  { "nuttall", "Nuttall", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_NUTTALL}, 0, 0, FLAGS, "win_func" },
1592  { "lanczos", "Lanczos", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_LANCZOS}, 0, 0, FLAGS, "win_func" },
1593  { "gauss", "Gauss", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_GAUSS}, 0, 0, FLAGS, "win_func" },
1594  { "tukey", "Tukey", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_TUKEY}, 0, 0, FLAGS, "win_func" },
1595  { "dolph", "Dolph-Chebyshev", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_DOLPH}, 0, 0, FLAGS, "win_func" },
1596  { "cauchy", "Cauchy", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_CAUCHY}, 0, 0, FLAGS, "win_func" },
1597  { "parzen", "Parzen", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_PARZEN}, 0, 0, FLAGS, "win_func" },
1598  { "poisson", "Poisson", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_POISSON}, 0, 0, FLAGS, "win_func" },
1599  { "bohman", "Bohman", 0, AV_OPT_TYPE_CONST, {.i64=WFUNC_BOHMAN}, 0, 0, FLAGS, "win_func" },
1600  { "orientation", "set orientation", OFFSET(orientation), AV_OPT_TYPE_INT, {.i64=VERTICAL}, 0, NB_ORIENTATIONS-1, FLAGS, "orientation" },
1601  { "vertical", NULL, 0, AV_OPT_TYPE_CONST, {.i64=VERTICAL}, 0, 0, FLAGS, "orientation" },
1602  { "horizontal", NULL, 0, AV_OPT_TYPE_CONST, {.i64=HORIZONTAL}, 0, 0, FLAGS, "orientation" },
1603  { "gain", "set scale gain", OFFSET(gain), AV_OPT_TYPE_FLOAT, {.dbl = 1}, 0, 128, FLAGS },
1604  { "legend", "draw legend", OFFSET(legend), AV_OPT_TYPE_BOOL, {.i64 = 1}, 0, 1, FLAGS },
1605  { "rotation", "color rotation", OFFSET(rotation), AV_OPT_TYPE_FLOAT, {.dbl = 0}, -1, 1, FLAGS },
1606  { "start", "start frequency", OFFSET(start), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT32_MAX, FLAGS },
1607  { "stop", "stop frequency", OFFSET(stop), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT32_MAX, FLAGS },
1608  { NULL }
1609 };
1610 
1611 AVFILTER_DEFINE_CLASS(showspectrumpic);
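
The table above declares ordinary AVOptions on the private context, so besides the option-string form they can also be set individually before the filter is initialized. A rough sketch under that assumption; the helper name and the chosen values are illustrative:

#include <libavfilter/avfilter.h>
#include <libavutil/opt.h>

static int setup_showspectrumpic(AVFilterGraph *graph, AVFilterContext **filt_ctx)
{
    const AVFilter *f = avfilter_get_by_name("showspectrumpic");
    AVFilterContext *ctx;

    if (!f)
        return AVERROR_FILTER_NOT_FOUND;
    ctx = avfilter_graph_alloc_filter(graph, f, "specpic");
    if (!ctx)
        return AVERROR(ENOMEM);

    /* AV_OPT_SEARCH_CHILDREN reaches the options of the private class. */
    av_opt_set    (ctx, "s",      "1024x512", AV_OPT_SEARCH_CHILDREN);
    av_opt_set    (ctx, "color",  "viridis",  AV_OPT_SEARCH_CHILDREN);
    av_opt_set    (ctx, "scale",  "log",      AV_OPT_SEARCH_CHILDREN);
    av_opt_set_int(ctx, "legend", 1,          AV_OPT_SEARCH_CHILDREN);

    *filt_ctx = ctx;
    return avfilter_init_str(ctx, NULL);
}
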
1612 
1613 static int showspectrumpic_request_frame(AVFilterLink *outlink)
1614 {
1615  AVFilterContext *ctx = outlink->src;
1616  ShowSpectrumContext *s = ctx->priv;
1617  AVFilterLink *inlink = ctx->inputs[0];
1618  int ret, samples;
1619 
1620  ret = ff_request_frame(inlink);
1621  samples = av_audio_fifo_size(s->fifo);
1622  if (ret == AVERROR_EOF && s->outpicref && samples > 0) {
1623  int consumed = 0;
1624  int x = 0, sz = s->orientation == VERTICAL ? s->w : s->h;
1625  int ch, spf, spb;
1626  AVFrame *fin;
1627 
1628  spf = s->win_size * (samples / ((s->win_size * sz) * ceil(samples / (float)(s->win_size * sz))));
1629  spf = FFMAX(1, spf);
1630 
1631  spb = (samples / (spf * sz)) * spf;
1632 
1633  fin = ff_get_audio_buffer(inlink, s->win_size);
1634  if (!fin)
1635  return AVERROR(ENOMEM);
1636 
1637  while (x < sz) {
1638  ret = av_audio_fifo_peek(s->fifo, (void **)fin->extended_data, s->win_size);
1639  if (ret < 0) {
1640  av_frame_free(&fin);
1641  return ret;
1642  }
1643 
1644  av_audio_fifo_drain(s->fifo, spf);
1645 
1646  if (ret < s->win_size) {
1647  for (ch = 0; ch < s->nb_display_channels; ch++) {
1648  memset(fin->extended_data[ch] + ret * sizeof(float), 0,
1649  (s->win_size - ret) * sizeof(float));
1650  }
1651  }
1652 
1653  ctx->internal->execute(ctx, run_channel_fft, fin, NULL, s->nb_display_channels);
1654  acalc_magnitudes(s);
1655 
1656  consumed += spf;
1657  if (consumed >= spb) {
1658  int h = s->orientation == VERTICAL ? s->h : s->w;
1659 
1660  scale_magnitudes(s, 1.f / (consumed / spf));
1661  plot_spectrum_column(inlink, fin);
1662  consumed = 0;
1663  x++;
1664  for (ch = 0; ch < s->nb_display_channels; ch++)
1665  memset(s->magnitudes[ch], 0, h * sizeof(float));
1666  }
1667  }
1668 
1669  av_frame_free(&fin);
1670  s->outpicref->pts = 0;
1671 
1672  if (s->legend)
1673  draw_legend(ctx, samples);
1674 
1675  ret = ff_filter_frame(outlink, s->outpicref);
1676  s->outpicref = NULL;
1677  }
1678 
1679  return ret;
1680 }
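
The spf/spb arithmetic in showspectrumpic_request_frame decides how many input samples each output column covers: spf samples are drained from the FIFO per FFT, and spb samples (several FFTs) are averaged into one column. A small standalone example with assumed input sizes reproduces the computation:

#include <math.h>
#include <stdio.h>

int main(void)
{
    /* Assumed example: ~1000 s of 44.1 kHz audio, default 4096-column output. */
    int samples  = 44100000;
    int win_size = 2048;
    int sz       = 4096;
    int spf, spb;

    spf = win_size * (samples / ((win_size * sz) * ceil(samples / (float)(win_size * sz))));
    spf = spf < 1 ? 1 : spf;                 /* FFMAX(1, spf) in the filter */
    spb = (samples / (spf * sz)) * spf;

    printf("spf=%d spb=%d FFTs averaged per column=%d\n", spf, spb, spb / spf);
    /* prints: spf=1794 spb=10764 FFTs averaged per column=6 */
    return 0;
}
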
1681 
1682 static int showspectrumpic_filter_frame(AVFilterLink *inlink, AVFrame *insamples)
1683 {
1684  AVFilterContext *ctx = inlink->dst;
1685  ShowSpectrumContext *s = ctx->priv;
1686  int ret;
1687 
1688  ret = av_audio_fifo_write(s->fifo, (void **)insamples->extended_data, insamples->nb_samples);
1689  av_frame_free(&insamples);
1690  return ret;
1691 }
1692 
1693 static const AVFilterPad showspectrumpic_inputs[] = {
1694  {
1695  .name = "default",
1696  .type = AVMEDIA_TYPE_AUDIO,
1697  .filter_frame = showspectrumpic_filter_frame,
1698  },
1699  { NULL }
1700 };
1701 
1702 static const AVFilterPad showspectrumpic_outputs[] = {
1703  {
1704  .name = "default",
1705  .type = AVMEDIA_TYPE_VIDEO,
1706  .config_props = config_output,
1707  .request_frame = showspectrumpic_request_frame,
1708  },
1709  { NULL }
1710 };
1711 
1712 AVFilter ff_avf_showspectrumpic = {
1713  .name = "showspectrumpic",
1714  .description = NULL_IF_CONFIG_SMALL("Convert input audio to a spectrum video output single picture."),
1715  .uninit = uninit,
1716  .query_formats = query_formats,
1717  .priv_size = sizeof(ShowSpectrumContext),
1718  .inputs = showspectrumpic_inputs,
1719  .outputs = showspectrumpic_outputs,
1720  .priv_class = &showspectrumpic_class,
1721  .flags = AVFILTER_FLAG_SLICE_THREADS,
1722 };
1723 
1724 #endif // CONFIG_SHOWSPECTRUMPIC_FILTER
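
Unlike showspectrum, the single-picture variant only buffers incoming audio in its FIFO and renders once its output is asked for a frame after the input reaches EOF. A minimal, hypothetical sketch of driving that from application code, assuming the buffersrc/buffersink contexts wrap this filter in a graph:

#include <libavfilter/buffersrc.h>
#include <libavfilter/buffersink.h>
#include <libavutil/frame.h>

/* Hypothetical helper: after all audio frames have been pushed into the
 * graph's abuffer source, signal EOF and pull the single spectrum picture. */
static int read_spectrum_picture(AVFilterContext *src, AVFilterContext *sink, AVFrame *pic)
{
    int ret = av_buffersrc_add_frame(src, NULL);   /* NULL frame signals EOF */

    if (ret < 0)
        return ret;
    return av_buffersink_get_frame(sink, pic);     /* the rendered spectrum  */
}
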