FFmpeg
avf_showspectrum.c
1 /*
2  * Copyright (c) 2012-2013 Clément Bœsch
3  * Copyright (c) 2013 Rudolf Polzer <divverent@xonotic.org>
4  * Copyright (c) 2015 Paul B Mahol
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * audio to spectrum (video) transmedia filter, based on ffplay rdft showmode
26  * (by Michael Niedermayer) and lavfi/avf_showwaves (by Stefano Sabatini).
27  */
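
/*
 * Illustrative usage (annotation, not part of the upstream header): the
 * filter is normally driven from a filtergraph, for example
 *
 *   ffmpeg -i in.wav -lavfi "showspectrum=s=1280x720:slide=scroll:color=intensity:scale=log" out.mkv
 *
 * The option names and accepted values correspond to the
 * showspectrum_options table defined below.
 */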
28 
29 #include "config_components.h"
30 
31 #include <float.h>
32 #include <math.h>
33 
34 #include "libavutil/tx.h"
35 #include "libavutil/avassert.h"
36 #include "libavutil/avstring.h"
37 #include "libavutil/channel_layout.h"
38 #include "libavutil/cpu.h"
39 #include "libavutil/opt.h"
40 #include "libavutil/parseutils.h"
41 #include "libavutil/xga_font_data.h"
42 #include "audio.h"
43 #include "video.h"
44 #include "avfilter.h"
45 #include "filters.h"
46 #include "internal.h"
47 #include "window_func.h"
48 
49 enum DisplayMode    { COMBINED, SEPARATE, NB_MODES };
50 enum DataMode       { D_MAGNITUDE, D_PHASE, D_UPHASE, NB_DMODES };
51 enum FrequencyScale { F_LINEAR, F_LOG, NB_FSCALES };
52 enum DisplayScale   { LINEAR, SQRT, CBRT, LOG, FOURTHRT, FIFTHRT, NB_SCALES };
53 enum ColorMode      { CHANNEL, INTENSITY, RAINBOW, MORELAND, NEBULAE, FIRE, FIERY, FRUIT, COOL, MAGMA, GREEN, VIRIDIS, PLASMA, CIVIDIS, TERRAIN, NB_CLMODES };
54 enum SlideMode      { REPLACE, SCROLL, FULLFRAME, RSCROLL, LREPLACE, NB_SLIDES };
55 enum Orientation    { VERTICAL, HORIZONTAL, NB_ORIENTATIONS };
56 
57 #define DEFAULT_LENGTH 300
58 
59 typedef struct ShowSpectrumContext {
60  const AVClass *class;
61  int w, h;
62  char *rate_str;
63  AVRational auto_frame_rate;
64  AVRational frame_rate;
65  AVFrame *outpicref;
66  AVFrame *in_frame;
67  int nb_display_channels;
68  int orientation;
69  int channel_width;
70  int channel_height;
71  int sliding; ///< 1 if sliding mode, 0 otherwise
72  int mode; ///< channel display mode
73  int color_mode; ///< display color scheme
74  int scale;
75  int fscale;
76  float saturation; ///< color saturation multiplier
77  float rotation; ///< color rotation
78  int start, stop; ///< zoom mode
79  int data;
80  int xpos; ///< x position (current column)
81  AVTXContext **fft; ///< Fast Fourier Transform context
82  AVTXContext **ifft; ///< Inverse Fast Fourier Transform context
83  av_tx_fn tx_fn;
84  av_tx_fn itx_fn;
85  int fft_size; ///< number of coeffs (FFT window size)
86  AVComplexFloat **fft_in; ///< input FFT coeffs
87  AVComplexFloat **fft_data; ///< bins holder for each (displayed) channels
88  AVComplexFloat **fft_scratch;///< scratch buffers
89  float *window_func_lut; ///< Window function LUT
90  float **magnitudes;
91  float **phases;
92  int win_func;
93  int win_size;
94  int buf_size;
95  double win_scale;
96  float overlap;
97  float gain;
98  int hop_size;
99  float *combine_buffer; ///< color combining buffer (4 * h items)
100  float **color_buffer; ///< color buffer (4 * h * ch items)
101  int64_t pts;
102  int64_t old_pts;
103  int64_t in_pts;
104  int old_len;
105  int single_pic;
106  int legend;
107  int start_x, start_y;
108  float drange, limit;
109  float dmin, dmax;
110  uint64_t samples;
111  int (*plot_channel)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
112 
113  float opacity_factor;
114 
115  AVFrame **frames;
116  unsigned int nb_frames;
117  unsigned int frames_size;
118 } ShowSpectrumContext;
119 
120 #define OFFSET(x) offsetof(ShowSpectrumContext, x)
121 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
122 
123 static const AVOption showspectrum_options[] = {
124  { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "640x512"}, 0, 0, FLAGS },
125  { "s", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "640x512"}, 0, 0, FLAGS },
126  { "slide", "set sliding mode", OFFSET(sliding), AV_OPT_TYPE_INT, {.i64 = 0}, 0, NB_SLIDES-1, FLAGS, "slide" },
127  { "replace", "replace old columns with new", 0, AV_OPT_TYPE_CONST, {.i64=REPLACE}, 0, 0, FLAGS, "slide" },
128  { "scroll", "scroll from right to left", 0, AV_OPT_TYPE_CONST, {.i64=SCROLL}, 0, 0, FLAGS, "slide" },
129  { "fullframe", "return full frames", 0, AV_OPT_TYPE_CONST, {.i64=FULLFRAME}, 0, 0, FLAGS, "slide" },
130  { "rscroll", "scroll from left to right", 0, AV_OPT_TYPE_CONST, {.i64=RSCROLL}, 0, 0, FLAGS, "slide" },
131  { "lreplace", "replace from right to left", 0, AV_OPT_TYPE_CONST, {.i64=LREPLACE}, 0, 0, FLAGS, "slide" },
132  { "mode", "set channel display mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=COMBINED}, COMBINED, NB_MODES-1, FLAGS, "mode" },
133  { "combined", "combined mode", 0, AV_OPT_TYPE_CONST, {.i64=COMBINED}, 0, 0, FLAGS, "mode" },
134  { "separate", "separate mode", 0, AV_OPT_TYPE_CONST, {.i64=SEPARATE}, 0, 0, FLAGS, "mode" },
135  { "color", "set channel coloring", OFFSET(color_mode), AV_OPT_TYPE_INT, {.i64=CHANNEL}, CHANNEL, NB_CLMODES-1, FLAGS, "color" },
136  { "channel", "separate color for each channel", 0, AV_OPT_TYPE_CONST, {.i64=CHANNEL}, 0, 0, FLAGS, "color" },
137  { "intensity", "intensity based coloring", 0, AV_OPT_TYPE_CONST, {.i64=INTENSITY}, 0, 0, FLAGS, "color" },
138  { "rainbow", "rainbow based coloring", 0, AV_OPT_TYPE_CONST, {.i64=RAINBOW}, 0, 0, FLAGS, "color" },
139  { "moreland", "moreland based coloring", 0, AV_OPT_TYPE_CONST, {.i64=MORELAND}, 0, 0, FLAGS, "color" },
140  { "nebulae", "nebulae based coloring", 0, AV_OPT_TYPE_CONST, {.i64=NEBULAE}, 0, 0, FLAGS, "color" },
141  { "fire", "fire based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FIRE}, 0, 0, FLAGS, "color" },
142  { "fiery", "fiery based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FIERY}, 0, 0, FLAGS, "color" },
143  { "fruit", "fruit based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FRUIT}, 0, 0, FLAGS, "color" },
144  { "cool", "cool based coloring", 0, AV_OPT_TYPE_CONST, {.i64=COOL}, 0, 0, FLAGS, "color" },
145  { "magma", "magma based coloring", 0, AV_OPT_TYPE_CONST, {.i64=MAGMA}, 0, 0, FLAGS, "color" },
146  { "green", "green based coloring", 0, AV_OPT_TYPE_CONST, {.i64=GREEN}, 0, 0, FLAGS, "color" },
147  { "viridis", "viridis based coloring", 0, AV_OPT_TYPE_CONST, {.i64=VIRIDIS}, 0, 0, FLAGS, "color" },
148  { "plasma", "plasma based coloring", 0, AV_OPT_TYPE_CONST, {.i64=PLASMA}, 0, 0, FLAGS, "color" },
149  { "cividis", "cividis based coloring", 0, AV_OPT_TYPE_CONST, {.i64=CIVIDIS}, 0, 0, FLAGS, "color" },
150  { "terrain", "terrain based coloring", 0, AV_OPT_TYPE_CONST, {.i64=TERRAIN}, 0, 0, FLAGS, "color" },
151  { "scale", "set display scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64=SQRT}, LINEAR, NB_SCALES-1, FLAGS, "scale" },
152  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=LINEAR}, 0, 0, FLAGS, "scale" },
153  { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SQRT}, 0, 0, FLAGS, "scale" },
154  { "cbrt", "cubic root", 0, AV_OPT_TYPE_CONST, {.i64=CBRT}, 0, 0, FLAGS, "scale" },
155  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=LOG}, 0, 0, FLAGS, "scale" },
156  { "4thrt","4th root", 0, AV_OPT_TYPE_CONST, {.i64=FOURTHRT}, 0, 0, FLAGS, "scale" },
157  { "5thrt","5th root", 0, AV_OPT_TYPE_CONST, {.i64=FIFTHRT}, 0, 0, FLAGS, "scale" },
158  { "fscale", "set frequency scale", OFFSET(fscale), AV_OPT_TYPE_INT, {.i64=F_LINEAR}, 0, NB_FSCALES-1, FLAGS, "fscale" },
159  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=F_LINEAR}, 0, 0, FLAGS, "fscale" },
160  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=F_LOG}, 0, 0, FLAGS, "fscale" },
161  { "saturation", "color saturation multiplier", OFFSET(saturation), AV_OPT_TYPE_FLOAT, {.dbl = 1}, -10, 10, FLAGS },
162  WIN_FUNC_OPTION("win_func", OFFSET(win_func), FLAGS, WFUNC_HANNING),
163  { "orientation", "set orientation", OFFSET(orientation), AV_OPT_TYPE_INT, {.i64=VERTICAL}, 0, NB_ORIENTATIONS-1, FLAGS, "orientation" },
164  { "vertical", NULL, 0, AV_OPT_TYPE_CONST, {.i64=VERTICAL}, 0, 0, FLAGS, "orientation" },
165  { "horizontal", NULL, 0, AV_OPT_TYPE_CONST, {.i64=HORIZONTAL}, 0, 0, FLAGS, "orientation" },
166  { "overlap", "set window overlap", OFFSET(overlap), AV_OPT_TYPE_FLOAT, {.dbl = 0}, 0, 1, FLAGS },
167  { "gain", "set scale gain", OFFSET(gain), AV_OPT_TYPE_FLOAT, {.dbl = 1}, 0, 128, FLAGS },
168  { "data", "set data mode", OFFSET(data), AV_OPT_TYPE_INT, {.i64 = 0}, 0, NB_DMODES-1, FLAGS, "data" },
169  { "magnitude", NULL, 0, AV_OPT_TYPE_CONST, {.i64=D_MAGNITUDE}, 0, 0, FLAGS, "data" },
170  { "phase", NULL, 0, AV_OPT_TYPE_CONST, {.i64=D_PHASE}, 0, 0, FLAGS, "data" },
171  { "uphase", NULL, 0, AV_OPT_TYPE_CONST, {.i64=D_UPHASE}, 0, 0, FLAGS, "data" },
172  { "rotation", "color rotation", OFFSET(rotation), AV_OPT_TYPE_FLOAT, {.dbl = 0}, -1, 1, FLAGS },
173  { "start", "start frequency", OFFSET(start), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT32_MAX, FLAGS },
174  { "stop", "stop frequency", OFFSET(stop), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT32_MAX, FLAGS },
175  { "fps", "set video rate", OFFSET(rate_str), AV_OPT_TYPE_STRING, {.str = "auto"}, 0, 0, FLAGS },
176  { "legend", "draw legend", OFFSET(legend), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
177  { "drange", "set dynamic range in dBFS", OFFSET(drange), AV_OPT_TYPE_FLOAT, {.dbl = 120}, 10, 200, FLAGS },
178  { "limit", "set upper limit in dBFS", OFFSET(limit), AV_OPT_TYPE_FLOAT, {.dbl = 0}, -100, 100, FLAGS },
179  { "opacity", "set opacity strength", OFFSET(opacity_factor), AV_OPT_TYPE_FLOAT, {.dbl = 1}, 0, 10, FLAGS },
180  { NULL }
181 };
182 
183 AVFILTER_DEFINE_CLASS(showspectrum);
184 
185 static const struct ColorTable {
186  float a, y, u, v;
187 } color_table[][8] = {
188  [INTENSITY] = {
189  { 0, 0, 0, 0 },
190  { 0.13, .03587126228984074, .1573300977624594, -.02548747583751842 },
191  { 0.30, .18572281794568020, .1772436246393981, .17475554840414750 },
192  { 0.60, .28184980583656130, -.1593064119945782, .47132074554608920 },
193  { 0.73, .65830621175547810, -.3716070802232764, .24352759331252930 },
194  { 0.78, .76318535758242900, -.4307467689263783, .16866496622310430 },
195  { 0.91, .95336363636363640, -.2045454545454546, .03313636363636363 },
196  { 1, 1, 0, 0 }},
197  [RAINBOW] = {
198  { 0, 0, 0, 0 },
199  { 0.13, 44/256., (189-128)/256., (138-128)/256. },
200  { 0.25, 29/256., (186-128)/256., (119-128)/256. },
201  { 0.38, 119/256., (194-128)/256., (53-128)/256. },
202  { 0.60, 111/256., (73-128)/256., (59-128)/256. },
203  { 0.73, 205/256., (19-128)/256., (149-128)/256. },
204  { 0.86, 135/256., (83-128)/256., (200-128)/256. },
205  { 1, 73/256., (95-128)/256., (225-128)/256. }},
206  [MORELAND] = {
207  { 0, 44/256., (181-128)/256., (112-128)/256. },
208  { 0.13, 126/256., (177-128)/256., (106-128)/256. },
209  { 0.25, 164/256., (163-128)/256., (109-128)/256. },
210  { 0.38, 200/256., (140-128)/256., (120-128)/256. },
211  { 0.60, 201/256., (117-128)/256., (141-128)/256. },
212  { 0.73, 177/256., (103-128)/256., (165-128)/256. },
213  { 0.86, 136/256., (100-128)/256., (183-128)/256. },
214  { 1, 68/256., (117-128)/256., (203-128)/256. }},
215  [NEBULAE] = {
216  { 0, 10/256., (134-128)/256., (132-128)/256. },
217  { 0.23, 21/256., (137-128)/256., (130-128)/256. },
218  { 0.45, 35/256., (134-128)/256., (134-128)/256. },
219  { 0.57, 51/256., (130-128)/256., (139-128)/256. },
220  { 0.67, 104/256., (116-128)/256., (162-128)/256. },
221  { 0.77, 120/256., (105-128)/256., (188-128)/256. },
222  { 0.87, 140/256., (105-128)/256., (188-128)/256. },
223  { 1, 1, 0, 0 }},
224  [FIRE] = {
225  { 0, 0, 0, 0 },
226  { 0.23, 44/256., (132-128)/256., (127-128)/256. },
227  { 0.45, 62/256., (116-128)/256., (140-128)/256. },
228  { 0.57, 75/256., (105-128)/256., (152-128)/256. },
229  { 0.67, 95/256., (91-128)/256., (166-128)/256. },
230  { 0.77, 126/256., (74-128)/256., (172-128)/256. },
231  { 0.87, 164/256., (73-128)/256., (162-128)/256. },
232  { 1, 1, 0, 0 }},
233  [FIERY] = {
234  { 0, 0, 0, 0 },
235  { 0.23, 36/256., (116-128)/256., (163-128)/256. },
236  { 0.45, 52/256., (102-128)/256., (200-128)/256. },
237  { 0.57, 116/256., (84-128)/256., (196-128)/256. },
238  { 0.67, 157/256., (67-128)/256., (181-128)/256. },
239  { 0.77, 193/256., (40-128)/256., (155-128)/256. },
240  { 0.87, 221/256., (101-128)/256., (134-128)/256. },
241  { 1, 1, 0, 0 }},
242  [FRUIT] = {
243  { 0, 0, 0, 0 },
244  { 0.20, 29/256., (136-128)/256., (119-128)/256. },
245  { 0.30, 60/256., (119-128)/256., (90-128)/256. },
246  { 0.40, 85/256., (91-128)/256., (85-128)/256. },
247  { 0.50, 116/256., (70-128)/256., (105-128)/256. },
248  { 0.60, 151/256., (50-128)/256., (146-128)/256. },
249  { 0.70, 191/256., (63-128)/256., (178-128)/256. },
250  { 1, 98/256., (80-128)/256., (221-128)/256. }},
251  [COOL] = {
252  { 0, 0, 0, 0 },
253  { .15, 0, .5, -.5 },
254  { 1, 1, -.5, .5 }},
255  [MAGMA] = {
256  { 0, 0, 0, 0 },
257  { 0.10, 23/256., (175-128)/256., (120-128)/256. },
258  { 0.23, 43/256., (158-128)/256., (144-128)/256. },
259  { 0.35, 85/256., (138-128)/256., (179-128)/256. },
260  { 0.48, 96/256., (128-128)/256., (189-128)/256. },
261  { 0.64, 128/256., (103-128)/256., (214-128)/256. },
262  { 0.92, 205/256., (80-128)/256., (152-128)/256. },
263  { 1, 1, 0, 0 }},
264  [GREEN] = {
265  { 0, 0, 0, 0 },
266  { .75, .5, 0, -.5 },
267  { 1, 1, 0, 0 }},
268  [VIRIDIS] = {
269  { 0, 0, 0, 0 },
270  { 0.10, 0x39/255., (0x9D -128)/255., (0x8F -128)/255. },
271  { 0.23, 0x5C/255., (0x9A -128)/255., (0x68 -128)/255. },
272  { 0.35, 0x69/255., (0x93 -128)/255., (0x57 -128)/255. },
273  { 0.48, 0x76/255., (0x88 -128)/255., (0x4B -128)/255. },
274  { 0.64, 0x8A/255., (0x72 -128)/255., (0x4F -128)/255. },
275  { 0.80, 0xA3/255., (0x50 -128)/255., (0x66 -128)/255. },
276  { 1, 0xCC/255., (0x2F -128)/255., (0x87 -128)/255. }},
277  [PLASMA] = {
278  { 0, 0, 0, 0 },
279  { 0.10, 0x27/255., (0xC2 -128)/255., (0x82 -128)/255. },
280  { 0.58, 0x5B/255., (0x9A -128)/255., (0xAE -128)/255. },
281  { 0.70, 0x89/255., (0x44 -128)/255., (0xAB -128)/255. },
282  { 0.80, 0xB4/255., (0x2B -128)/255., (0x9E -128)/255. },
283  { 0.91, 0xD2/255., (0x38 -128)/255., (0x92 -128)/255. },
284  { 1, 1, 0, 0. }},
285  [CIVIDIS] = {
286  { 0, 0, 0, 0 },
287  { 0.20, 0x28/255., (0x98 -128)/255., (0x6F -128)/255. },
288  { 0.50, 0x48/255., (0x95 -128)/255., (0x74 -128)/255. },
289  { 0.63, 0x69/255., (0x84 -128)/255., (0x7F -128)/255. },
290  { 0.76, 0x89/255., (0x75 -128)/255., (0x84 -128)/255. },
291  { 0.90, 0xCE/255., (0x35 -128)/255., (0x95 -128)/255. },
292  { 1, 1, 0, 0. }},
293  [TERRAIN] = {
294  { 0, 0, 0, 0 },
295  { 0.15, 0, .5, 0 },
296  { 0.60, 1, -.5, -.5 },
297  { 0.85, 1, -.5, .5 },
298  { 1, 1, 0, 0 }},
299 };
300 
301 static av_cold void uninit(AVFilterContext *ctx)
302 {
303  ShowSpectrumContext *s = ctx->priv;
304  int i;
305 
306  av_freep(&s->combine_buffer);
307  if (s->fft) {
308  for (i = 0; i < s->nb_display_channels; i++)
309  av_tx_uninit(&s->fft[i]);
310  }
311  av_freep(&s->fft);
312  if (s->ifft) {
313  for (i = 0; i < s->nb_display_channels; i++)
314  av_tx_uninit(&s->ifft[i]);
315  }
316  av_freep(&s->ifft);
317  if (s->fft_data) {
318  for (i = 0; i < s->nb_display_channels; i++)
319  av_freep(&s->fft_data[i]);
320  }
321  av_freep(&s->fft_data);
322  if (s->fft_in) {
323  for (i = 0; i < s->nb_display_channels; i++)
324  av_freep(&s->fft_in[i]);
325  }
326  av_freep(&s->fft_in);
327  if (s->fft_scratch) {
328  for (i = 0; i < s->nb_display_channels; i++)
329  av_freep(&s->fft_scratch[i]);
330  }
331  av_freep(&s->fft_scratch);
332  if (s->color_buffer) {
333  for (i = 0; i < s->nb_display_channels; i++)
334  av_freep(&s->color_buffer[i]);
335  }
336  av_freep(&s->color_buffer);
337  av_freep(&s->window_func_lut);
338  if (s->magnitudes) {
339  for (i = 0; i < s->nb_display_channels; i++)
340  av_freep(&s->magnitudes[i]);
341  }
342  av_freep(&s->magnitudes);
343  av_frame_free(&s->outpicref);
344  av_frame_free(&s->in_frame);
345  if (s->phases) {
346  for (i = 0; i < s->nb_display_channels; i++)
347  av_freep(&s->phases[i]);
348  }
349  av_freep(&s->phases);
350 
351  while (s->nb_frames > 0) {
352  av_frame_free(&s->frames[s->nb_frames - 1]);
353  s->nb_frames--;
354  }
355 
356  av_freep(&s->frames);
357 }
358 
359 static int query_formats(AVFilterContext *ctx)
360 {
361  AVFilterFormats *formats = NULL;
362  AVFilterChannelLayouts *layouts = NULL;
363  AVFilterLink *inlink = ctx->inputs[0];
364  AVFilterLink *outlink = ctx->outputs[0];
365  static const enum AVSampleFormat sample_fmts[] = { AV_SAMPLE_FMT_FLTP, AV_SAMPLE_FMT_NONE };
366  static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVA444P, AV_PIX_FMT_NONE };
367  int ret;
368 
369  /* set input audio formats */
370  formats = ff_make_format_list(sample_fmts);
371  if ((ret = ff_formats_ref(formats, &inlink->outcfg.formats)) < 0)
372  return ret;
373 
374  layouts = ff_all_channel_counts();
375  if ((ret = ff_channel_layouts_ref(layouts, &inlink->outcfg.channel_layouts)) < 0)
376  return ret;
377 
378  formats = ff_all_samplerates();
379  if ((ret = ff_formats_ref(formats, &inlink->outcfg.samplerates)) < 0)
380  return ret;
381 
382  /* set output video format */
383  formats = ff_make_format_list(pix_fmts);
384  if ((ret = ff_formats_ref(formats, &outlink->incfg.formats)) < 0)
385  return ret;
386 
387  return 0;
388 }
389 
390 static int run_channel_fft(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
391 {
392  ShowSpectrumContext *s = ctx->priv;
393  AVFilterLink *inlink = ctx->inputs[0];
394  const float *window_func_lut = s->window_func_lut;
395  AVFrame *fin = arg;
396  const int ch = jobnr;
397  int n;
398 
399  /* fill FFT input with the number of samples available */
400  const float *p = (float *)fin->extended_data[ch];
401  float *in_frame = (float *)s->in_frame->extended_data[ch];
402 
403  memmove(in_frame, in_frame + s->hop_size, (s->fft_size - s->hop_size) * sizeof(float));
404  memcpy(in_frame + s->fft_size - s->hop_size, p, fin->nb_samples * sizeof(float));
405 
406  for (int i = fin->nb_samples; i < s->hop_size; i++)
407  in_frame[i + s->fft_size - s->hop_size] = 0.f;
408 
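 /* Annotation: when a zoom band is set (s->stop != 0), the branch below
  * evaluates the spectrum only over [start, stop] Hz using a chirp-Z
  * (Bluestein-style) transform: the windowed input is pre-multiplied by the
  * chirp exp(-i*(n*theta + n*n*phi/2)), circularly convolved with the chirp
  * kernel h[n] = exp(i*n*n*phi/2) via FFT/IFFT, and post-multiplied by
  * exp(-i*k*k*phi/2), which yields the DTFT sampled at theta + k*phi.
  * Otherwise a plain FFT of the windowed input is run. */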
409  if (s->stop) {
410  float theta, phi, psi, a, b, S, c;
411  AVComplexFloat *f = s->fft_in[ch];
412  AVComplexFloat *g = s->fft_data[ch];
413  AVComplexFloat *h = s->fft_scratch[ch];
414  int L = s->buf_size;
415  int N = s->win_size;
416  int M = s->win_size / 2;
417 
418  for (n = 0; n < s->win_size; n++) {
419  s->fft_data[ch][n].re = in_frame[n] * window_func_lut[n];
420  s->fft_data[ch][n].im = 0;
421  }
422 
423  phi = 2.f * M_PI * (s->stop - s->start) / (float)inlink->sample_rate / (M - 1);
424  theta = 2.f * M_PI * s->start / (float)inlink->sample_rate;
425 
426  for (int n = 0; n < M; n++) {
427  h[n].re = cosf(n * n / 2.f * phi);
428  h[n].im = sinf(n * n / 2.f * phi);
429  }
430 
431  for (int n = M; n < L; n++) {
432  h[n].re = 0.f;
433  h[n].im = 0.f;
434  }
435 
436  for (int n = L - N; n < L; n++) {
437  h[n].re = cosf((L - n) * (L - n) / 2.f * phi);
438  h[n].im = sinf((L - n) * (L - n) / 2.f * phi);
439  }
440 
441  for (int n = N; n < L; n++) {
442  g[n].re = 0.f;
443  g[n].im = 0.f;
444  }
445 
446  for (int n = 0; n < N; n++) {
447  psi = n * theta + n * n / 2.f * phi;
448  c = cosf(psi);
449  S = -sinf(psi);
450  a = c * g[n].re - S * g[n].im;
451  b = S * g[n].re + c * g[n].im;
452  g[n].re = a;
453  g[n].im = b;
454  }
455 
456  memcpy(f, h, s->buf_size * sizeof(*f));
457  s->tx_fn(s->fft[ch], h, f, sizeof(AVComplexFloat));
458 
459  memcpy(f, g, s->buf_size * sizeof(*f));
460  s->tx_fn(s->fft[ch], g, f, sizeof(AVComplexFloat));
461 
462  for (int n = 0; n < L; n++) {
463  c = g[n].re;
464  S = g[n].im;
465  a = c * h[n].re - S * h[n].im;
466  b = S * h[n].re + c * h[n].im;
467 
468  g[n].re = a / L;
469  g[n].im = b / L;
470  }
471 
472  memcpy(f, g, s->buf_size * sizeof(*f));
473  s->itx_fn(s->ifft[ch], g, f, sizeof(AVComplexFloat));
474 
475  for (int k = 0; k < M; k++) {
476  psi = k * k / 2.f * phi;
477  c = cosf(psi);
478  S = -sinf(psi);
479  a = c * g[k].re - S * g[k].im;
480  b = S * g[k].re + c * g[k].im;
481  s->fft_data[ch][k].re = a;
482  s->fft_data[ch][k].im = b;
483  }
484  } else {
485  for (n = 0; n < s->win_size; n++) {
486  s->fft_in[ch][n].re = in_frame[n] * window_func_lut[n];
487  s->fft_in[ch][n].im = 0;
488  }
489 
490  /* run FFT on each samples set */
491  s->tx_fn(s->fft[ch], s->fft_data[ch], s->fft_in[ch], sizeof(AVComplexFloat));
492  }
493 
494  return 0;
495 }
496 
497 static void drawtext(AVFrame *pic, int x, int y, const char *txt, int o)
498 {
499  const uint8_t *font;
500  int font_height;
501 
502  font = avpriv_cga_font, font_height = 8;
503 
504  for (int i = 0; txt[i]; i++) {
505  int char_y, mask;
506 
507  if (o) {
508  for (char_y = font_height - 1; char_y >= 0; char_y--) {
509  uint8_t *p = pic->data[0] + (y + i * 10) * pic->linesize[0] + x;
510  for (mask = 0x80; mask; mask >>= 1) {
511  if (font[txt[i] * font_height + font_height - 1 - char_y] & mask)
512  p[char_y] = ~p[char_y];
513  p += pic->linesize[0];
514  }
515  }
516  } else {
517  uint8_t *p = pic->data[0] + y*pic->linesize[0] + (x + i*8);
518  for (char_y = 0; char_y < font_height; char_y++) {
519  for (mask = 0x80; mask; mask >>= 1) {
520  if (font[txt[i] * font_height + char_y] & mask)
521  *p = ~(*p);
522  p++;
523  }
524  p += pic->linesize[0] - 8;
525  }
526  }
527  }
528 
529  for (int i = 0; txt[i] && pic->data[3]; i++) {
530  int char_y, mask;
531 
532  if (o) {
533  for (char_y = font_height - 1; char_y >= 0; char_y--) {
534  uint8_t *p = pic->data[3] + (y + i * 10) * pic->linesize[3] + x;
535  for (mask = 0x80; mask; mask >>= 1) {
536  for (int k = 0; k < 8; k++)
537  p[k] = 255;
538  p += pic->linesize[3];
539  }
540  }
541  } else {
542  uint8_t *p = pic->data[3] + y*pic->linesize[3] + (x + i*8);
543  for (char_y = 0; char_y < font_height; char_y++) {
544  for (mask = 0x80; mask; mask >>= 1)
545  *p++ = 255;
546  p += pic->linesize[3] - 8;
547  }
548  }
549  }
550 }
551 
552 static void color_range(ShowSpectrumContext *s, int ch,
553  float *yf, float *uf, float *vf)
554 {
555  switch (s->mode) {
556  case COMBINED:
557  // reduce range by channel count
558  *yf = 256.0f / s->nb_display_channels;
559  switch (s->color_mode) {
560  case RAINBOW:
561  case MORELAND:
562  case NEBULAE:
563  case FIRE:
564  case FIERY:
565  case FRUIT:
566  case COOL:
567  case GREEN:
568  case VIRIDIS:
569  case PLASMA:
570  case CIVIDIS:
571  case TERRAIN:
572  case MAGMA:
573  case INTENSITY:
574  *uf = *yf;
575  *vf = *yf;
576  break;
577  case CHANNEL:
578  /* adjust saturation for mixed UV coloring */
579  /* this factor is correct for infinite channels, an approximation otherwise */
580  *uf = *yf * M_PI;
581  *vf = *yf * M_PI;
582  break;
583  default:
584  av_assert0(0);
585  }
586  break;
587  case SEPARATE:
588  // full range
589  *yf = 256.0f;
590  *uf = 256.0f;
591  *vf = 256.0f;
592  break;
593  default:
594  av_assert0(0);
595  }
596 
597  if (s->color_mode == CHANNEL) {
598  if (s->nb_display_channels > 1) {
599  *uf *= 0.5f * sinf((2 * M_PI * ch) / s->nb_display_channels + M_PI * s->rotation);
600  *vf *= 0.5f * cosf((2 * M_PI * ch) / s->nb_display_channels + M_PI * s->rotation);
601  } else {
602  *uf *= 0.5f * sinf(M_PI * s->rotation);
603  *vf *= 0.5f * cosf(M_PI * s->rotation + M_PI_2);
604  }
605  } else {
606  *uf += *uf * sinf(M_PI * s->rotation);
607  *vf += *vf * cosf(M_PI * s->rotation + M_PI_2);
608  }
609 
610  *uf *= s->saturation;
611  *vf *= s->saturation;
612 }
613 
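 /* Annotation: map a normalized intensity 'a' in [0,1] to Y/U/V/A output
  * values scaled by yf/uf/vf and the opacity factor. Gradient color modes
  * (color_mode > CHANNEL) look the value up in color_table[] and linearly
  * interpolate between the two neighbouring control points; CHANNEL mode
  * applies the per-channel factors directly. */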
614 static void pick_color(ShowSpectrumContext *s,
615  float yf, float uf, float vf,
616  float a, float *out)
617 {
618  const float af = s->opacity_factor * 255.f;
619 
620  if (s->color_mode > CHANNEL) {
621  const int cm = s->color_mode;
622  float y, u, v;
623  int i;
624 
625  for (i = 1; i < FF_ARRAY_ELEMS(color_table[cm]) - 1; i++)
626  if (color_table[cm][i].a >= a)
627  break;
628  // i now is the first item >= the color
629  // now we know to interpolate between item i - 1 and i
630  if (a <= color_table[cm][i - 1].a) {
631  y = color_table[cm][i - 1].y;
632  u = color_table[cm][i - 1].u;
633  v = color_table[cm][i - 1].v;
634  } else if (a >= color_table[cm][i].a) {
635  y = color_table[cm][i].y;
636  u = color_table[cm][i].u;
637  v = color_table[cm][i].v;
638  } else {
639  float start = color_table[cm][i - 1].a;
640  float end = color_table[cm][i].a;
641  float lerpfrac = (a - start) / (end - start);
642  y = color_table[cm][i - 1].y * (1.0f - lerpfrac)
643  + color_table[cm][i].y * lerpfrac;
644  u = color_table[cm][i - 1].u * (1.0f - lerpfrac)
645  + color_table[cm][i].u * lerpfrac;
646  v = color_table[cm][i - 1].v * (1.0f - lerpfrac)
647  + color_table[cm][i].v * lerpfrac;
648  }
649 
650  out[0] = y * yf;
651  out[1] = u * uf;
652  out[2] = v * vf;
653  out[3] = a * af;
654  } else {
655  out[0] = a * yf;
656  out[1] = a * uf;
657  out[2] = a * vf;
658  out[3] = a * af;
659  }
660 }
661 
662 static char *get_time(AVFilterContext *ctx, float seconds, int x)
663 {
664  char *units;
665 
666  if (x == 0)
667  units = av_asprintf("0");
668  else if (log10(seconds) > 6)
669  units = av_asprintf("%.2fh", seconds / (60 * 60));
670  else if (log10(seconds) > 3)
671  units = av_asprintf("%.2fm", seconds / 60);
672  else
673  units = av_asprintf("%.2fs", seconds);
674  return units;
675 }
676 
677 static float log_scale(const float bin,
678  const float bmin, const float bmax,
679  const float min, const float max)
680 {
681  return exp2f(((bin - bmin) / (bmax - bmin)) * (log2f(max) - log2f(min)) + log2f(min));
682 }
683 
684 static float get_hz(const float bin, const float bmax,
685  const float min, const float max,
686  int fscale)
687 {
688  switch (fscale) {
689  case F_LINEAR:
690  return min + (bin / bmax) * (max - min);
691  case F_LOG:
692  return min + log_scale(bin, 0, bmax, 20.f, max - min);
693  default:
694  return 0.f;
695  }
696 }
697 
698 static float inv_log_scale(float bin,
699  float bmin, float bmax,
700  float min, float max)
701 {
702  return (min * exp2f((bin * (log2f(max) - log2f(20.f))) / bmax) + min) * bmax / max;
703 }
704 
705 static float bin_pos(const int bin, const int num_bins, const float min, const float max)
706 {
707  return inv_log_scale(bin, 0.f, num_bins, 20.f, max - min);
708 }
709 
710 static float get_scale(AVFilterContext *ctx, int scale, float a)
711 {
712  ShowSpectrumContext *s = ctx->priv;
713  const float dmin = s->dmin;
714  const float dmax = s->dmax;
715 
716  a = av_clipf(a, dmin, dmax);
717  if (scale != LOG)
718  a = (a - dmin) / (dmax - dmin);
719 
720  switch (scale) {
721  case LINEAR:
722  break;
723  case SQRT:
724  a = sqrtf(a);
725  break;
726  case CBRT:
727  a = cbrtf(a);
728  break;
729  case FOURTHRT:
730  a = sqrtf(sqrtf(a));
731  break;
732  case FIFTHRT:
733  a = powf(a, 0.2f);
734  break;
735  case LOG:
736  a = (s->drange - s->limit + log10f(a) * 20.f) / s->drange;
737  break;
738  default:
739  av_assert0(0);
740  }
741 
742  return a;
743 }
744 
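 /* Annotation: inverse of get_scale(), mapping a display value in [0,1]
  * back to an amplitude; draw_legend() uses it to label the intensity
  * scale ticks. */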
745 static float get_iscale(AVFilterContext *ctx, int scale, float a)
746 {
747  ShowSpectrumContext *s = ctx->priv;
748  const float dmin = s->dmin;
749  const float dmax = s->dmax;
750 
751  switch (scale) {
752  case LINEAR:
753  break;
754  case SQRT:
755  a = a * a;
756  break;
757  case CBRT:
758  a = a * a * a;
759  break;
760  case FOURTHRT:
761  a = a * a * a * a;
762  break;
763  case FIFTHRT:
764  a = a * a * a * a * a;
765  break;
766  case LOG:
767  a = expf(M_LN10 * (a * s->drange - s->drange + s->limit) / 20.f);
768  break;
769  default:
770  av_assert0(0);
771  }
772 
773  if (scale != LOG)
774  a = a * (dmax - dmin) + dmin;
775 
776  return a;
777 }
778 
779 static int draw_legend(AVFilterContext *ctx, uint64_t samples)
780 {
781  ShowSpectrumContext *s = ctx->priv;
782  AVFilterLink *inlink = ctx->inputs[0];
783  AVFilterLink *outlink = ctx->outputs[0];
784  int ch, y, x = 0, sz = s->orientation == VERTICAL ? s->w : s->h;
785  int multi = (s->mode == SEPARATE && s->color_mode == CHANNEL);
786  float spp = samples / (float)sz;
787  char *text;
788  uint8_t *dst;
789  char chlayout_str[128];
790 
791  av_channel_layout_describe(&inlink->ch_layout, chlayout_str, sizeof(chlayout_str));
792 
793  text = av_asprintf("%d Hz | %s", inlink->sample_rate, chlayout_str);
794  if (!text)
795  return AVERROR(ENOMEM);
796 
797  drawtext(s->outpicref, 2, outlink->h - 10, "CREATED BY LIBAVFILTER", 0);
798  drawtext(s->outpicref, outlink->w - 2 - strlen(text) * 10, outlink->h - 10, text, 0);
799  av_freep(&text);
800  if (s->stop) {
801  text = av_asprintf("Zoom: %d Hz - %d Hz", s->start, s->stop);
802  if (!text)
803  return AVERROR(ENOMEM);
804  drawtext(s->outpicref, outlink->w - 2 - strlen(text) * 10, 3, text, 0);
805  av_freep(&text);
806  }
807 
808  dst = s->outpicref->data[0] + (s->start_y - 1) * s->outpicref->linesize[0] + s->start_x - 1;
809  for (x = 0; x < s->w + 1; x++)
810  dst[x] = 200;
811  dst = s->outpicref->data[0] + (s->start_y + s->h) * s->outpicref->linesize[0] + s->start_x - 1;
812  for (x = 0; x < s->w + 1; x++)
813  dst[x] = 200;
814  for (y = 0; y < s->h + 2; y++) {
815  dst = s->outpicref->data[0] + (y + s->start_y - 1) * s->outpicref->linesize[0];
816  dst[s->start_x - 1] = 200;
817  dst[s->start_x + s->w] = 200;
818  }
819  if (s->orientation == VERTICAL) {
820  int h = s->mode == SEPARATE ? s->h / s->nb_display_channels : s->h;
821  int hh = s->mode == SEPARATE ? -(s->h % s->nb_display_channels) + 1 : 1;
822  for (ch = 0; ch < (s->mode == SEPARATE ? s->nb_display_channels : 1); ch++) {
823  for (y = 0; y < h; y += 20) {
824  dst = s->outpicref->data[0] + (s->start_y + h * (ch + 1) - y - hh) * s->outpicref->linesize[0];
825  dst[s->start_x - 2] = 200;
826  dst[s->start_x + s->w + 1] = 200;
827  }
828  for (y = 0; y < h; y += 40) {
829  dst = s->outpicref->data[0] + (s->start_y + h * (ch + 1) - y - hh) * s->outpicref->linesize[0];
830  dst[s->start_x - 3] = 200;
831  dst[s->start_x + s->w + 2] = 200;
832  }
833  dst = s->outpicref->data[0] + (s->start_y - 2) * s->outpicref->linesize[0] + s->start_x;
834  for (x = 0; x < s->w; x+=40)
835  dst[x] = 200;
836  dst = s->outpicref->data[0] + (s->start_y - 3) * s->outpicref->linesize[0] + s->start_x;
837  for (x = 0; x < s->w; x+=80)
838  dst[x] = 200;
839  dst = s->outpicref->data[0] + (s->h + s->start_y + 1) * s->outpicref->linesize[0] + s->start_x;
840  for (x = 0; x < s->w; x+=40) {
841  dst[x] = 200;
842  }
843  dst = s->outpicref->data[0] + (s->h + s->start_y + 2) * s->outpicref->linesize[0] + s->start_x;
844  for (x = 0; x < s->w; x+=80) {
845  dst[x] = 200;
846  }
847  for (y = 0; y < h; y += 40) {
848  float range = s->stop ? s->stop - s->start : inlink->sample_rate / 2;
849  float hertz = get_hz(y, h, s->start, s->start + range, s->fscale);
850  char *units;
851 
852  if (hertz == 0)
853  units = av_asprintf("DC");
854  else
855  units = av_asprintf("%.2f", hertz);
856  if (!units)
857  return AVERROR(ENOMEM);
858 
859  drawtext(s->outpicref, s->start_x - 8 * strlen(units) - 4, h * (ch + 1) + s->start_y - y - 4 - hh, units, 0);
860  av_free(units);
861  }
862  }
863 
864  for (x = 0; x < s->w && s->single_pic; x+=80) {
865  float seconds = x * spp / inlink->sample_rate;
866  char *units = get_time(ctx, seconds, x);
867  if (!units)
868  return AVERROR(ENOMEM);
869 
870  drawtext(s->outpicref, s->start_x + x - 4 * strlen(units), s->h + s->start_y + 6, units, 0);
871  drawtext(s->outpicref, s->start_x + x - 4 * strlen(units), s->start_y - 12, units, 0);
872  av_free(units);
873  }
874 
875  drawtext(s->outpicref, outlink->w / 2 - 4 * 4, outlink->h - s->start_y / 2, "TIME", 0);
876  drawtext(s->outpicref, s->start_x / 7, outlink->h / 2 - 14 * 4, "FREQUENCY (Hz)", 1);
877  } else {
878  int w = s->mode == SEPARATE ? s->w / s->nb_display_channels : s->w;
879  for (y = 0; y < s->h; y += 20) {
880  dst = s->outpicref->data[0] + (s->start_y + y) * s->outpicref->linesize[0];
881  dst[s->start_x - 2] = 200;
882  dst[s->start_x + s->w + 1] = 200;
883  }
884  for (y = 0; y < s->h; y += 40) {
885  dst = s->outpicref->data[0] + (s->start_y + y) * s->outpicref->linesize[0];
886  dst[s->start_x - 3] = 200;
887  dst[s->start_x + s->w + 2] = 200;
888  }
889  for (ch = 0; ch < (s->mode == SEPARATE ? s->nb_display_channels : 1); ch++) {
890  dst = s->outpicref->data[0] + (s->start_y - 2) * s->outpicref->linesize[0] + s->start_x + w * ch;
891  for (x = 0; x < w; x+=40)
892  dst[x] = 200;
893  dst = s->outpicref->data[0] + (s->start_y - 3) * s->outpicref->linesize[0] + s->start_x + w * ch;
894  for (x = 0; x < w; x+=80)
895  dst[x] = 200;
896  dst = s->outpicref->data[0] + (s->h + s->start_y + 1) * s->outpicref->linesize[0] + s->start_x + w * ch;
897  for (x = 0; x < w; x+=40) {
898  dst[x] = 200;
899  }
900  dst = s->outpicref->data[0] + (s->h + s->start_y + 2) * s->outpicref->linesize[0] + s->start_x + w * ch;
901  for (x = 0; x < w; x+=80) {
902  dst[x] = 200;
903  }
904  for (x = 0; x < w - 79; x += 80) {
905  float range = s->stop ? s->stop - s->start : inlink->sample_rate / 2;
906  float hertz = get_hz(x, w, s->start, s->start + range, s->fscale);
907  char *units;
908 
909  if (hertz == 0)
910  units = av_asprintf("DC");
911  else
912  units = av_asprintf("%.2f", hertz);
913  if (!units)
914  return AVERROR(ENOMEM);
915 
916  drawtext(s->outpicref, s->start_x - 4 * strlen(units) + x + w * ch, s->start_y - 12, units, 0);
917  drawtext(s->outpicref, s->start_x - 4 * strlen(units) + x + w * ch, s->h + s->start_y + 6, units, 0);
918  av_free(units);
919  }
920  }
921  for (y = 0; y < s->h && s->single_pic; y+=40) {
922  float seconds = y * spp / inlink->sample_rate;
923  char *units = get_time(ctx, seconds, x);
924  if (!units)
925  return AVERROR(ENOMEM);
926 
927  drawtext(s->outpicref, s->start_x - 8 * strlen(units) - 4, s->start_y + y - 4, units, 0);
928  av_free(units);
929  }
930  drawtext(s->outpicref, s->start_x / 7, outlink->h / 2 - 4 * 4, "TIME", 1);
931  drawtext(s->outpicref, outlink->w / 2 - 14 * 4, outlink->h - s->start_y / 2, "FREQUENCY (Hz)", 0);
932  }
933 
934  for (ch = 0; ch < (multi ? s->nb_display_channels : 1); ch++) {
935  int h = multi ? s->h / s->nb_display_channels : s->h;
936 
937  for (y = 0; y < h; y++) {
938  float out[4] = { 0., 127.5, 127.5, 0.f};
939  int chn;
940 
941  for (chn = 0; chn < (s->mode == SEPARATE ? 1 : s->nb_display_channels); chn++) {
942  float yf, uf, vf;
943  int channel = (multi) ? s->nb_display_channels - ch - 1 : chn;
944  float lout[4];
945 
946  color_range(s, channel, &yf, &uf, &vf);
947  pick_color(s, yf, uf, vf, y / (float)h, lout);
948  out[0] += lout[0];
949  out[1] += lout[1];
950  out[2] += lout[2];
951  out[3] += lout[3];
952  }
953  memset(s->outpicref->data[0]+(s->start_y + h * (ch + 1) - y - 1) * s->outpicref->linesize[0] + s->w + s->start_x + 20, av_clip_uint8(out[0]), 10);
954  memset(s->outpicref->data[1]+(s->start_y + h * (ch + 1) - y - 1) * s->outpicref->linesize[1] + s->w + s->start_x + 20, av_clip_uint8(out[1]), 10);
955  memset(s->outpicref->data[2]+(s->start_y + h * (ch + 1) - y - 1) * s->outpicref->linesize[2] + s->w + s->start_x + 20, av_clip_uint8(out[2]), 10);
956  if (s->outpicref->data[3])
957  memset(s->outpicref->data[3]+(s->start_y + h * (ch + 1) - y - 1) * s->outpicref->linesize[3] + s->w + s->start_x + 20, av_clip_uint8(out[3]), 10);
958  }
959 
960  for (y = 0; ch == 0 && y < h + 5; y += 25) {
961  static const char *log_fmt = "%.0f";
962  static const char *lin_fmt = "%.3f";
963  const float a = av_clipf(1.f - y / (float)(h - 1), 0.f, 1.f);
964  const float value = s->scale == LOG ? log10f(get_iscale(ctx, s->scale, a)) * 20.f : get_iscale(ctx, s->scale, a);
965  char *text;
966 
967  text = av_asprintf(s->scale == LOG ? log_fmt : lin_fmt, value);
968  if (!text)
969  continue;
970  drawtext(s->outpicref, s->w + s->start_x + 35, s->start_y + y - 3, text, 0);
971  av_free(text);
972  }
973  }
974 
975  if (s->scale == LOG)
976  drawtext(s->outpicref, s->w + s->start_x + 22, s->start_y + s->h + 20, "dBFS", 0);
977 
978  return 0;
979 }
980 
981 static float get_value(AVFilterContext *ctx, int ch, int y)
982 {
983  ShowSpectrumContext *s = ctx->priv;
984  float *magnitudes = s->magnitudes[ch];
985  float *phases = s->phases[ch];
986  float a;
987 
988  switch (s->data) {
989  case D_MAGNITUDE:
990  /* get magnitude */
991  a = magnitudes[y];
992  break;
993  case D_UPHASE:
994  case D_PHASE:
995  /* get phase */
996  a = phases[y];
997  break;
998  default:
999  av_assert0(0);
1000  }
1001 
1002  return av_clipf(get_scale(ctx, s->scale, a), 0.f, 1.f);
1003 }
1004 
1005 static int plot_channel_lin(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
1006 {
1007  ShowSpectrumContext *s = ctx->priv;
1008  const int h = s->orientation == VERTICAL ? s->channel_height : s->channel_width;
1009  const int ch = jobnr;
1010  float yf, uf, vf;
1011  int y;
1012 
1013  /* decide color range */
1014  color_range(s, ch, &yf, &uf, &vf);
1015 
1016  /* draw the channel */
1017  for (y = 0; y < h; y++) {
1018  int row = (s->mode == COMBINED) ? y : ch * h + y;
1019  float *out = &s->color_buffer[ch][4 * row];
1020  float a = get_value(ctx, ch, y);
1021 
1022  pick_color(s, yf, uf, vf, a, out);
1023  }
1024 
1025  return 0;
1026 }
1027 
1028 static int plot_channel_log(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
1029 {
1030  ShowSpectrumContext *s = ctx->priv;
1031  AVFilterLink *inlink = ctx->inputs[0];
1032  const int h = s->orientation == VERTICAL ? s->channel_height : s->channel_width;
1033  const int ch = jobnr;
1034  float yf, uf, vf;
1035 
1036  /* decide color range */
1037  color_range(s, ch, &yf, &uf, &vf);
1038 
1039  /* draw the channel */
1040  for (int yy = 0; yy < h; yy++) {
1041  float range = s->stop ? s->stop - s->start : inlink->sample_rate / 2;
1042  float pos = bin_pos(yy, h, s->start, s->start + range);
1043  float delta = pos - floorf(pos);
1044  float a0, a1;
1045 
1046  a0 = get_value(ctx, ch, av_clip(pos, 0, h-1));
1047  a1 = get_value(ctx, ch, av_clip(pos+1, 0, h-1));
1048  {
1049  int row = (s->mode == COMBINED) ? yy : ch * h + yy;
1050  float *out = &s->color_buffer[ch][4 * row];
1051 
1052  pick_color(s, yf, uf, vf, delta * a1 + (1.f - delta) * a0, out);
1053  }
1054  }
1055 
1056  return 0;
1057 }
1058 
1059 static int config_output(AVFilterLink *outlink)
1060 {
1061  AVFilterContext *ctx = outlink->src;
1062  AVFilterLink *inlink = ctx->inputs[0];
1063  ShowSpectrumContext *s = ctx->priv;
1064  int i, fft_size, h, w, ret;
1065  float overlap;
1066 
1067  s->old_pts = AV_NOPTS_VALUE;
1068  s->dmax = expf(s->limit * M_LN10 / 20.f);
1069  s->dmin = expf((s->limit - s->drange) * M_LN10 / 20.f);
1070 
1071  switch (s->fscale) {
1072  case F_LINEAR: s->plot_channel = plot_channel_lin; break;
1073  case F_LOG: s->plot_channel = plot_channel_log; break;
1074  default: return AVERROR_BUG;
1075  }
1076 
1077  s->stop = FFMIN(s->stop, inlink->sample_rate / 2);
1078  if ((s->stop || s->start) && s->stop <= s->start) {
1079  av_log(ctx, AV_LOG_ERROR, "Stop frequency should be greater than start.\n");
1080  return AVERROR(EINVAL);
1081  }
1082 
1083  if (!strcmp(ctx->filter->name, "showspectrumpic"))
1084  s->single_pic = 1;
1085 
1086  outlink->w = s->w;
1087  outlink->h = s->h;
1088  outlink->sample_aspect_ratio = (AVRational){1,1};
1089 
1090  if (s->legend) {
1091  s->start_x = (log10(inlink->sample_rate) + 1) * 25;
1092  s->start_y = 64;
1093  outlink->w += s->start_x * 2;
1094  outlink->h += s->start_y * 2;
1095  }
1096 
1097  h = (s->mode == COMBINED || s->orientation == HORIZONTAL) ? s->h : s->h / inlink->ch_layout.nb_channels;
1098  w = (s->mode == COMBINED || s->orientation == VERTICAL) ? s->w : s->w / inlink->ch_layout.nb_channels;
1099  s->channel_height = h;
1100  s->channel_width = w;
1101 
1102  if (s->orientation == VERTICAL) {
1103  /* FFT window size (precision) according to the requested output frame height */
1104  fft_size = h * 2;
1105  } else {
1106  /* FFT window size (precision) according to the requested output frame width */
1107  fft_size = w * 2;
1108  }
1109 
1110  s->win_size = fft_size;
1111  s->buf_size = FFALIGN(s->win_size << (!!s->stop), av_cpu_max_align());
1112 
1113  if (!s->fft) {
1114  s->fft = av_calloc(inlink->ch_layout.nb_channels, sizeof(*s->fft));
1115  if (!s->fft)
1116  return AVERROR(ENOMEM);
1117  }
1118 
1119  if (s->stop) {
1120  if (!s->ifft) {
1121  s->ifft = av_calloc(inlink->ch_layout.nb_channels, sizeof(*s->ifft));
1122  if (!s->ifft)
1123  return AVERROR(ENOMEM);
1124  }
1125  }
1126 
1127  /* (re-)configuration if the video output changed (or first init) */
1128  if (fft_size != s->fft_size) {
1129  AVFrame *outpicref;
1130 
1131  s->fft_size = fft_size;
1132 
1133  /* FFT buffers: x2 for each (display) channel buffer.
1134  * Note: we use free and malloc instead of a realloc-like function to
1135  * make sure the buffer is aligned in memory for the FFT functions. */
1136  for (i = 0; i < s->nb_display_channels; i++) {
1137  if (s->stop) {
1138  av_tx_uninit(&s->ifft[i]);
1139  av_freep(&s->fft_scratch[i]);
1140  }
1141  av_tx_uninit(&s->fft[i]);
1142  av_freep(&s->fft_in[i]);
1143  av_freep(&s->fft_data[i]);
1144  }
1145  av_freep(&s->fft_data);
1146 
1147  s->nb_display_channels = inlink->ch_layout.nb_channels;
1148  for (i = 0; i < s->nb_display_channels; i++) {
1149  float scale = 1.f;
1150 
1151  ret = av_tx_init(&s->fft[i], &s->tx_fn, AV_TX_FLOAT_FFT, 0, fft_size << (!!s->stop), &scale, 0);
1152  if (s->stop) {
1153  ret = av_tx_init(&s->ifft[i], &s->itx_fn, AV_TX_FLOAT_FFT, 1, fft_size << (!!s->stop), &scale, 0);
1154  if (ret < 0) {
1155  av_log(ctx, AV_LOG_ERROR, "Unable to create Inverse FFT context. "
1156  "The window size might be too high.\n");
1157  return ret;
1158  }
1159  }
1160  if (ret < 0) {
1161  av_log(ctx, AV_LOG_ERROR, "Unable to create FFT context. "
1162  "The window size might be too high.\n");
1163  return ret;
1164  }
1165  }
1166 
1167  s->magnitudes = av_calloc(s->nb_display_channels, sizeof(*s->magnitudes));
1168  if (!s->magnitudes)
1169  return AVERROR(ENOMEM);
1170  for (i = 0; i < s->nb_display_channels; i++) {
1171  s->magnitudes[i] = av_calloc(s->orientation == VERTICAL ? s->h : s->w, sizeof(**s->magnitudes));
1172  if (!s->magnitudes[i])
1173  return AVERROR(ENOMEM);
1174  }
1175 
1176  s->phases = av_calloc(s->nb_display_channels, sizeof(*s->phases));
1177  if (!s->phases)
1178  return AVERROR(ENOMEM);
1179  for (i = 0; i < s->nb_display_channels; i++) {
1180  s->phases[i] = av_calloc(s->orientation == VERTICAL ? s->h : s->w, sizeof(**s->phases));
1181  if (!s->phases[i])
1182  return AVERROR(ENOMEM);
1183  }
1184 
1185  av_freep(&s->color_buffer);
1186  s->color_buffer = av_calloc(s->nb_display_channels, sizeof(*s->color_buffer));
1187  if (!s->color_buffer)
1188  return AVERROR(ENOMEM);
1189  for (i = 0; i < s->nb_display_channels; i++) {
1190  s->color_buffer[i] = av_calloc(s->orientation == VERTICAL ? s->h * 4 : s->w * 4, sizeof(**s->color_buffer));
1191  if (!s->color_buffer[i])
1192  return AVERROR(ENOMEM);
1193  }
1194 
1195  s->fft_in = av_calloc(s->nb_display_channels, sizeof(*s->fft_in));
1196  if (!s->fft_in)
1197  return AVERROR(ENOMEM);
1198  s->fft_data = av_calloc(s->nb_display_channels, sizeof(*s->fft_data));
1199  if (!s->fft_data)
1200  return AVERROR(ENOMEM);
1201  s->fft_scratch = av_calloc(s->nb_display_channels, sizeof(*s->fft_scratch));
1202  if (!s->fft_scratch)
1203  return AVERROR(ENOMEM);
1204  for (i = 0; i < s->nb_display_channels; i++) {
1205  s->fft_in[i] = av_calloc(s->buf_size, sizeof(**s->fft_in));
1206  if (!s->fft_in[i])
1207  return AVERROR(ENOMEM);
1208 
1209  s->fft_data[i] = av_calloc(s->buf_size, sizeof(**s->fft_data));
1210  if (!s->fft_data[i])
1211  return AVERROR(ENOMEM);
1212 
1213  s->fft_scratch[i] = av_calloc(s->buf_size, sizeof(**s->fft_scratch));
1214  if (!s->fft_scratch[i])
1215  return AVERROR(ENOMEM);
1216  }
1217 
1218  /* pre-calc windowing function */
1219  s->window_func_lut =
1220  av_realloc_f(s->window_func_lut, s->win_size,
1221  sizeof(*s->window_func_lut));
1222  if (!s->window_func_lut)
1223  return AVERROR(ENOMEM);
1224  generate_window_func(s->window_func_lut, s->win_size, s->win_func, &overlap);
1225  if (s->overlap == 1)
1226  s->overlap = overlap;
1227  s->hop_size = (1.f - s->overlap) * s->win_size;
1228  if (s->hop_size < 1) {
1229  av_log(ctx, AV_LOG_ERROR, "overlap %f too big\n", s->overlap);
1230  return AVERROR(EINVAL);
1231  }
1232 
1233  for (s->win_scale = 0, i = 0; i < s->win_size; i++) {
1234  s->win_scale += s->window_func_lut[i] * s->window_func_lut[i];
1235  }
1236  s->win_scale = 1.f / sqrtf(s->win_scale);
1237 
1238  /* prepare the initial picref buffer (black frame) */
1239  av_frame_free(&s->outpicref);
1240  s->outpicref = outpicref =
1241  ff_get_video_buffer(outlink, outlink->w, outlink->h);
1242  if (!outpicref)
1243  return AVERROR(ENOMEM);
1244  outpicref->sample_aspect_ratio = (AVRational){1,1};
1245  for (i = 0; i < outlink->h; i++) {
1246  memset(outpicref->data[0] + i * outpicref->linesize[0], 0, outlink->w);
1247  memset(outpicref->data[1] + i * outpicref->linesize[1], 128, outlink->w);
1248  memset(outpicref->data[2] + i * outpicref->linesize[2], 128, outlink->w);
1249  if (outpicref->data[3])
1250  memset(outpicref->data[3] + i * outpicref->linesize[3], 0, outlink->w);
1251  }
1252  outpicref->color_range = AVCOL_RANGE_JPEG;
1253 
1254  if (!s->single_pic && s->legend)
1255  draw_legend(ctx, 0);
1256  }
1257 
1258  if ((s->orientation == VERTICAL && s->xpos >= s->w) ||
1259  (s->orientation == HORIZONTAL && s->xpos >= s->h))
1260  s->xpos = 0;
1261 
1262  if (s->sliding == LREPLACE) {
1263  if (s->orientation == VERTICAL)
1264  s->xpos = s->w - 1;
1265  if (s->orientation == HORIZONTAL)
1266  s->xpos = s->h - 1;
1267  }
1268 
1269  s->auto_frame_rate = av_make_q(inlink->sample_rate, s->hop_size);
1270  if (s->orientation == VERTICAL && s->sliding == FULLFRAME)
1271  s->auto_frame_rate = av_mul_q(s->auto_frame_rate, av_make_q(1, s->w));
1272  if (s->orientation == HORIZONTAL && s->sliding == FULLFRAME)
1273  s->auto_frame_rate = av_mul_q(s->auto_frame_rate, av_make_q(1, s->h));
1274  if (!s->single_pic && strcmp(s->rate_str, "auto")) {
1275  int ret = av_parse_video_rate(&s->frame_rate, s->rate_str);
1276  if (ret < 0)
1277  return ret;
1278  } else if (s->single_pic) {
1279  s->frame_rate = av_make_q(1, 1);
1280  } else {
1281  s->frame_rate = s->auto_frame_rate;
1282  }
1283  outlink->frame_rate = s->frame_rate;
1284  outlink->time_base = av_inv_q(outlink->frame_rate);
1285 
1286  if (s->orientation == VERTICAL) {
1287  s->combine_buffer =
1288  av_realloc_f(s->combine_buffer, s->h * 4,
1289  sizeof(*s->combine_buffer));
1290  } else {
1291  s->combine_buffer =
1292  av_realloc_f(s->combine_buffer, s->w * 4,
1293  sizeof(*s->combine_buffer));
1294  }
1295 
1296  av_log(ctx, AV_LOG_VERBOSE, "s:%dx%d FFT window size:%d\n",
1297  s->w, s->h, s->win_size);
1298 
1299  s->in_frame = ff_get_audio_buffer(inlink, s->win_size);
1300  if (!s->in_frame)
1301  return AVERROR(ENOMEM);
1302 
1303  s->frames = av_fast_realloc(NULL, &s->frames_size,
1304  DEFAULT_LENGTH * sizeof(*(s->frames)));
1305  if (!s->frames)
1306  return AVERROR(ENOMEM);
1307 
1308  return 0;
1309 }
1310 
1311 #define RE(y, ch) s->fft_data[ch][y].re
1312 #define IM(y, ch) s->fft_data[ch][y].im
1313 #define MAGNITUDE(y, ch) hypotf(RE(y, ch), IM(y, ch))
1314 #define PHASE(y, ch) atan2f(IM(y, ch), RE(y, ch))
1315 
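 /* Annotation: per-channel slice jobs converting FFT bins into display
  * values. Magnitudes are scaled by the window energy normalization
  * (win_scale) and the user gain; phases are normalized to [0,1]. */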
1316 static int calc_channel_magnitudes(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
1317 {
1318  ShowSpectrumContext *s = ctx->priv;
1319  const double w = s->win_scale * (s->scale == LOG ? s->win_scale : 1);
1320  int y, h = s->orientation == VERTICAL ? s->h : s->w;
1321  const float f = s->gain * w;
1322  const int ch = jobnr;
1323  float *magnitudes = s->magnitudes[ch];
1324 
1325  for (y = 0; y < h; y++)
1326  magnitudes[y] = MAGNITUDE(y, ch) * f;
1327 
1328  return 0;
1329 }
1330 
1331 static int calc_channel_phases(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
1332 {
1333  ShowSpectrumContext *s = ctx->priv;
1334  const int h = s->orientation == VERTICAL ? s->h : s->w;
1335  const int ch = jobnr;
1336  float *phases = s->phases[ch];
1337  int y;
1338 
1339  for (y = 0; y < h; y++)
1340  phases[y] = (PHASE(y, ch) / M_PI + 1) / 2;
1341 
1342  return 0;
1343 }
1344 
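 /* Annotation: one-dimensional phase unwrapping. Whenever the jump between
  * consecutive samples exceeds the tolerance, a multiple of 2*pi is added
  * or subtracted so the sequence becomes continuous; the resulting min/max
  * are returned so the caller can renormalize to [0,1]. */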
1345 static void unwrap(float *x, int N, float tol, float *mi, float *ma)
1346 {
1347  const float rng = 2.f * M_PI;
1348  float prev_p = 0.f;
1349  float max = -FLT_MAX;
1350  float min = FLT_MAX;
1351 
1352  for (int i = 0; i < N; i++) {
1353  const float d = x[FFMIN(i + 1, N)] - x[i];
1354  const float p = ceilf(fabsf(d) / rng) * rng * (((d < tol) > 0.f) - ((d > -tol) > 0.f));
1355 
1356  x[i] += p + prev_p;
1357  prev_p += p;
1358  max = fmaxf(x[i], max);
1359  min = fminf(x[i], min);
1360  }
1361 
1362  *mi = min;
1363  *ma = max;
1364 }
1365 
1366 static int calc_channel_uphases(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
1367 {
1368  ShowSpectrumContext *s = ctx->priv;
1369  const int h = s->orientation == VERTICAL ? s->h : s->w;
1370  const int ch = jobnr;
1371  float *phases = s->phases[ch];
1372  float min, max, scale;
1373  int y;
1374 
1375  for (y = 0; y < h; y++)
1376  phases[y] = PHASE(y, ch);
1377  unwrap(phases, h, M_PI, &min, &max);
1378  scale = 1.f / (max - min + FLT_MIN);
1379  for (y = 0; y < h; y++)
1380  phases[y] = fabsf((phases[y] - min) * scale);
1381 
1382  return 0;
1383 }
1384 
1385 static void acc_magnitudes(ShowSpectrumContext *s)
1386 {
1387  const double w = s->win_scale * (s->scale == LOG ? s->win_scale : 1);
1388  int ch, y, h = s->orientation == VERTICAL ? s->h : s->w;
1389  const float f = s->gain * w;
1390 
1391  for (ch = 0; ch < s->nb_display_channels; ch++) {
1392  float *magnitudes = s->magnitudes[ch];
1393 
1394  for (y = 0; y < h; y++)
1395  magnitudes[y] += MAGNITUDE(y, ch) * f;
1396  }
1397 }
1398 
1399 static void scale_magnitudes(ShowSpectrumContext *s, float scale)
1400 {
1401  int ch, y, h = s->orientation == VERTICAL ? s->h : s->w;
1402 
1403  for (ch = 0; ch < s->nb_display_channels; ch++) {
1404  float *magnitudes = s->magnitudes[ch];
1405 
1406  for (y = 0; y < h; y++)
1407  magnitudes[y] *= scale;
1408  }
1409 }
1410 
1411 static void clear_combine_buffer(ShowSpectrumContext *s, int size)
1412 {
1413  int y;
1414 
1415  for (y = 0; y < size; y++) {
1416  s->combine_buffer[4 * y ] = 0;
1417  s->combine_buffer[4 * y + 1] = 127.5;
1418  s->combine_buffer[4 * y + 2] = 127.5;
1419  s->combine_buffer[4 * y + 3] = 0;
1420  }
1421 }
1422 
1423 static int plot_spectrum_column(AVFilterLink *inlink, AVFrame *insamples)
1424 {
1425  AVFilterContext *ctx = inlink->dst;
1426  AVFilterLink *outlink = ctx->outputs[0];
1427  ShowSpectrumContext *s = ctx->priv;
1428  AVFrame *outpicref = s->outpicref;
1429  int ret, plane, x, y, z = s->orientation == VERTICAL ? s->h : s->w;
1430  const int alpha = outpicref->data[3] != NULL;
1431 
1432  /* fill a new spectrum column */
1433  /* initialize buffer for combining to black */
1434  clear_combine_buffer(s, z);
1435 
1436  ff_filter_execute(ctx, s->plot_channel, NULL, NULL, s->nb_display_channels);
1437 
1438  for (y = 0; y < z * 4; y++) {
1439  for (x = 0; x < s->nb_display_channels; x++) {
1440  s->combine_buffer[y] += s->color_buffer[x][y];
1441  }
1442  }
1443 
1444  ret = ff_inlink_make_frame_writable(outlink, &s->outpicref);
1445  if (ret < 0)
1446  return ret;
1447  outpicref = s->outpicref;
1448  /* copy to output */
1449  if (s->orientation == VERTICAL) {
1450  if (s->sliding == SCROLL) {
1451  for (plane = 0; plane < 3 + alpha; plane++) {
1452  for (y = 0; y < s->h; y++) {
1453  uint8_t *p = outpicref->data[plane] + s->start_x +
1454  (y + s->start_y) * outpicref->linesize[plane];
1455  memmove(p, p + 1, s->w - 1);
1456  }
1457  }
1458  s->xpos = s->w - 1;
1459  } else if (s->sliding == RSCROLL) {
1460  for (plane = 0; plane < 3 + alpha; plane++) {
1461  for (y = 0; y < s->h; y++) {
1462  uint8_t *p = outpicref->data[plane] + s->start_x +
1463  (y + s->start_y) * outpicref->linesize[plane];
1464  memmove(p + 1, p, s->w - 1);
1465  }
1466  }
1467  s->xpos = 0;
1468  }
1469  for (plane = 0; plane < 3; plane++) {
1470  uint8_t *p = outpicref->data[plane] + s->start_x +
1471  (outlink->h - 1 - s->start_y) * outpicref->linesize[plane] +
1472  s->xpos;
1473  for (y = 0; y < s->h; y++) {
1474  *p = lrintf(av_clipf(s->combine_buffer[4 * y + plane], 0, 255));
1475  p -= outpicref->linesize[plane];
1476  }
1477  }
1478  if (alpha) {
1479  uint8_t *p = outpicref->data[3] + s->start_x +
1480  (outlink->h - 1 - s->start_y) * outpicref->linesize[3] +
1481  s->xpos;
1482  for (y = 0; y < s->h; y++) {
1483  *p = lrintf(av_clipf(s->combine_buffer[4 * y + 3], 0, 255));
1484  p -= outpicref->linesize[3];
1485  }
1486  }
1487  } else {
1488  if (s->sliding == SCROLL) {
1489  for (plane = 0; plane < 3 + alpha; plane++) {
1490  for (y = 1; y < s->h; y++) {
1491  memmove(outpicref->data[plane] + (y-1 + s->start_y) * outpicref->linesize[plane] + s->start_x,
1492  outpicref->data[plane] + (y + s->start_y) * outpicref->linesize[plane] + s->start_x,
1493  s->w);
1494  }
1495  }
1496  s->xpos = s->h - 1;
1497  } else if (s->sliding == RSCROLL) {
1498  for (plane = 0; plane < 3 + alpha; plane++) {
1499  for (y = s->h - 1; y >= 1; y--) {
1500  memmove(outpicref->data[plane] + (y + s->start_y) * outpicref->linesize[plane] + s->start_x,
1501  outpicref->data[plane] + (y-1 + s->start_y) * outpicref->linesize[plane] + s->start_x,
1502  s->w);
1503  }
1504  }
1505  s->xpos = 0;
1506  }
1507  for (plane = 0; plane < 3; plane++) {
1508  uint8_t *p = outpicref->data[plane] + s->start_x +
1509  (s->xpos + s->start_y) * outpicref->linesize[plane];
1510  for (x = 0; x < s->w; x++) {
1511  *p = lrintf(av_clipf(s->combine_buffer[4 * x + plane], 0, 255));
1512  p++;
1513  }
1514  }
1515  if (alpha) {
1516  uint8_t *p = outpicref->data[3] + s->start_x +
1517  (s->xpos + s->start_y) * outpicref->linesize[3];
1518  for (x = 0; x < s->w; x++) {
1519  *p = lrintf(av_clipf(s->combine_buffer[4 * x + 3], 0, 255));
1520  p++;
1521  }
1522  }
1523  }
1524 
1525  if (s->sliding != FULLFRAME || s->xpos == 0)
1526  s->pts = outpicref->pts = av_rescale_q(s->in_pts, inlink->time_base, outlink->time_base);
1527 
1528  if (s->sliding == LREPLACE) {
1529  s->xpos--;
1530  if (s->orientation == VERTICAL && s->xpos < 0)
1531  s->xpos = s->w - 1;
1532  if (s->orientation == HORIZONTAL && s->xpos < 0)
1533  s->xpos = s->h - 1;
1534  } else {
1535  s->xpos++;
1536  if (s->orientation == VERTICAL && s->xpos >= s->w)
1537  s->xpos = 0;
1538  if (s->orientation == HORIZONTAL && s->xpos >= s->h)
1539  s->xpos = 0;
1540  }
1541 
1542  if (!s->single_pic && (s->sliding != FULLFRAME || s->xpos == 0)) {
1543  if (s->old_pts < outpicref->pts || s->sliding == FULLFRAME ||
1544  (ff_outlink_get_status(inlink) == AVERROR_EOF &&
1545  ff_inlink_queued_samples(inlink) <= s->hop_size)) {
1546  AVFrame *clone;
1547 
1548  if (s->legend) {
1549  char *units = get_time(ctx, insamples->pts /(float)inlink->sample_rate, x);
1550  if (!units)
1551  return AVERROR(ENOMEM);
1552 
1553  if (s->orientation == VERTICAL) {
1554  for (y = 0; y < 10; y++) {
1555  memset(s->outpicref->data[0] + outlink->w / 2 - 4 * s->old_len +
1556  (outlink->h - s->start_y / 2 - 20 + y) * s->outpicref->linesize[0], 0, 10 * s->old_len);
1557  }
1558  drawtext(s->outpicref,
1559  outlink->w / 2 - 4 * strlen(units),
1560  outlink->h - s->start_y / 2 - 20,
1561  units, 0);
1562  } else {
1563  for (y = 0; y < 10 * s->old_len; y++) {
1564  memset(s->outpicref->data[0] + s->start_x / 7 + 20 +
1565  (outlink->h / 2 - 4 * s->old_len + y) * s->outpicref->linesize[0], 0, 10);
1566  }
1567  drawtext(s->outpicref,
1568  s->start_x / 7 + 20,
1569  outlink->h / 2 - 4 * strlen(units),
1570  units, 1);
1571  }
1572  s->old_len = strlen(units);
1573  av_free(units);
1574  }
1575  s->old_pts = outpicref->pts;
1576  clone = av_frame_clone(s->outpicref);
1577  if (!clone)
1578  return AVERROR(ENOMEM);
1579  ret = ff_filter_frame(outlink, clone);
1580  if (ret < 0)
1581  return ret;
1582  return 0;
1583  }
1584  }
1585 
1586  return 1;
1587 }
1588 
1589 #if CONFIG_SHOWSPECTRUM_FILTER
1590 
1591 static int activate(AVFilterContext *ctx)
1592 {
1593  AVFilterLink *inlink = ctx->inputs[0];
1594  AVFilterLink *outlink = ctx->outputs[0];
1595  ShowSpectrumContext *s = ctx->priv;
1596  int ret, status;
1597  int64_t pts;
1598 
1598 
1599  FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
1600 
1601  if (s->outpicref) {
1602  AVFrame *fin;
1603 
1604  ret = ff_inlink_consume_samples(inlink, s->hop_size, s->hop_size, &fin);
1605  if (ret < 0)
1606  return ret;
1607  if (ret > 0) {
1608  ff_filter_execute(ctx, run_channel_fft, fin, NULL, s->nb_display_channels);
1609 
1610  if (s->data == D_MAGNITUDE)
1611  ff_filter_execute(ctx, calc_channel_magnitudes, NULL, NULL, s->nb_display_channels);
1612 
1613  if (s->data == D_PHASE)
1614  ff_filter_execute(ctx, calc_channel_phases, NULL, NULL, s->nb_display_channels);
1615 
1616  if (s->data == D_UPHASE)
1617  ff_filter_execute(ctx, calc_channel_uphases, NULL, NULL, s->nb_display_channels);
1618 
1619  if (s->sliding != FULLFRAME || s->xpos == 0)
1620  s->in_pts = fin->pts;
1621  ret = plot_spectrum_column(inlink, fin);
1622  av_frame_free(&fin);
1623  if (ret <= 0)
1624  return ret;
1625  }
1626  }
1627 
1628  if (ff_outlink_get_status(inlink) == AVERROR_EOF &&
1629  s->sliding == FULLFRAME &&
1630  s->xpos > 0 && s->outpicref) {
1631 
1632  if (s->orientation == VERTICAL) {
1633  for (int i = 0; i < outlink->h; i++) {
1634  memset(s->outpicref->data[0] + i * s->outpicref->linesize[0] + s->xpos, 0, outlink->w - s->xpos);
1635  memset(s->outpicref->data[1] + i * s->outpicref->linesize[1] + s->xpos, 128, outlink->w - s->xpos);
1636  memset(s->outpicref->data[2] + i * s->outpicref->linesize[2] + s->xpos, 128, outlink->w - s->xpos);
1637  if (s->outpicref->data[3])
1638  memset(s->outpicref->data[3] + i * s->outpicref->linesize[3] + s->xpos, 0, outlink->w - s->xpos);
1639  }
1640  } else {
1641  for (int i = s->xpos; i < outlink->h; i++) {
1642  memset(s->outpicref->data[0] + i * s->outpicref->linesize[0], 0, outlink->w);
1643  memset(s->outpicref->data[1] + i * s->outpicref->linesize[1], 128, outlink->w);
1644  memset(s->outpicref->data[2] + i * s->outpicref->linesize[2], 128, outlink->w);
1645  if (s->outpicref->data[3])
1646  memset(s->outpicref->data[3] + i * s->outpicref->linesize[3], 0, outlink->w);
1647  }
1648  }
1649  s->outpicref->pts = av_rescale_q(s->in_pts, inlink->time_base, outlink->time_base);
1650  pts = s->outpicref->pts;
1651  ret = ff_filter_frame(outlink, s->outpicref);
1652  s->outpicref = NULL;
1653  ff_outlink_set_status(outlink, AVERROR_EOF, pts);
1654  return 0;
1655  }
1656 
1657  if (ff_inlink_acknowledge_status(inlink, &status, &pts)) {
1658  if (status == AVERROR_EOF) {
1659  ff_outlink_set_status(outlink, status, s->pts);
1660  return 0;
1661  }
1662  }
1663 
1664  if (ff_inlink_queued_samples(inlink) >= s->hop_size) {
1665  ff_filter_set_ready(ctx, 10);
1666  return 0;
1667  }
1668 
1669  if (ff_outlink_frame_wanted(outlink)) {
1670  ff_inlink_request_frame(inlink);
1671  return 0;
1672  }
1673 
1674  return FFERROR_NOT_READY;
1675 }
1676 
1677 static const AVFilterPad showspectrum_inputs[] = {
1678  {
1679  .name = "default",
1680  .type = AVMEDIA_TYPE_AUDIO,
1681  },
1682 };
1683 
1684 static const AVFilterPad showspectrum_outputs[] = {
1685  {
1686  .name = "default",
1687  .type = AVMEDIA_TYPE_VIDEO,
1688  .config_props = config_output,
1689  },
1690 };
1691 
1692 const AVFilter ff_avf_showspectrum = {
1693  .name = "showspectrum",
1694  .description = NULL_IF_CONFIG_SMALL("Convert input audio to a spectrum video output."),
1695  .uninit = uninit,
1696  .priv_size = sizeof(ShowSpectrumContext),
1697  FILTER_INPUTS(showspectrum_inputs),
1698  FILTER_OUTPUTS(showspectrum_outputs),
1699  FILTER_QUERY_FUNC(query_formats),
1700  .activate = activate,
1701  .priv_class = &showspectrum_class,
1702  .flags = AVFILTER_FLAG_SLICE_THREADS,
1703 };
1704 #endif // CONFIG_SHOWSPECTRUM_FILTER
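/* Example invocation (file names are placeholders):
 *   ffmpeg -i in.wav -lavfi showspectrum=s=1280x512:slide=scroll:color=rainbow out.mkv
 */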
1705 
1706 #if CONFIG_SHOWSPECTRUMPIC_FILTER
1707 
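/* Option table for the picture variant: same knobs as showspectrum where they
 * apply, but with a larger default canvas (4096x2048) and the legend drawn by
 * default. */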
1708 static const AVOption showspectrumpic_options[] = {
1709  { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "4096x2048"}, 0, 0, FLAGS },
1710  { "s", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "4096x2048"}, 0, 0, FLAGS },
1711  { "mode", "set channel display mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=COMBINED}, 0, NB_MODES-1, FLAGS, "mode" },
1712  { "combined", "combined mode", 0, AV_OPT_TYPE_CONST, {.i64=COMBINED}, 0, 0, FLAGS, "mode" },
1713  { "separate", "separate mode", 0, AV_OPT_TYPE_CONST, {.i64=SEPARATE}, 0, 0, FLAGS, "mode" },
1714  { "color", "set channel coloring", OFFSET(color_mode), AV_OPT_TYPE_INT, {.i64=INTENSITY}, 0, NB_CLMODES-1, FLAGS, "color" },
1715  { "channel", "separate color for each channel", 0, AV_OPT_TYPE_CONST, {.i64=CHANNEL}, 0, 0, FLAGS, "color" },
1716  { "intensity", "intensity based coloring", 0, AV_OPT_TYPE_CONST, {.i64=INTENSITY}, 0, 0, FLAGS, "color" },
1717  { "rainbow", "rainbow based coloring", 0, AV_OPT_TYPE_CONST, {.i64=RAINBOW}, 0, 0, FLAGS, "color" },
1718  { "moreland", "moreland based coloring", 0, AV_OPT_TYPE_CONST, {.i64=MORELAND}, 0, 0, FLAGS, "color" },
1719  { "nebulae", "nebulae based coloring", 0, AV_OPT_TYPE_CONST, {.i64=NEBULAE}, 0, 0, FLAGS, "color" },
1720  { "fire", "fire based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FIRE}, 0, 0, FLAGS, "color" },
1721  { "fiery", "fiery based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FIERY}, 0, 0, FLAGS, "color" },
1722  { "fruit", "fruit based coloring", 0, AV_OPT_TYPE_CONST, {.i64=FRUIT}, 0, 0, FLAGS, "color" },
1723  { "cool", "cool based coloring", 0, AV_OPT_TYPE_CONST, {.i64=COOL}, 0, 0, FLAGS, "color" },
1724  { "magma", "magma based coloring", 0, AV_OPT_TYPE_CONST, {.i64=MAGMA}, 0, 0, FLAGS, "color" },
1725  { "green", "green based coloring", 0, AV_OPT_TYPE_CONST, {.i64=GREEN}, 0, 0, FLAGS, "color" },
1726  { "viridis", "viridis based coloring", 0, AV_OPT_TYPE_CONST, {.i64=VIRIDIS}, 0, 0, FLAGS, "color" },
1727  { "plasma", "plasma based coloring", 0, AV_OPT_TYPE_CONST, {.i64=PLASMA}, 0, 0, FLAGS, "color" },
1728  { "cividis", "cividis based coloring", 0, AV_OPT_TYPE_CONST, {.i64=CIVIDIS}, 0, 0, FLAGS, "color" },
1729  { "terrain", "terrain based coloring", 0, AV_OPT_TYPE_CONST, {.i64=TERRAIN}, 0, 0, FLAGS, "color" },
1730  { "scale", "set display scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64=LOG}, 0, NB_SCALES-1, FLAGS, "scale" },
1731  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=LINEAR}, 0, 0, FLAGS, "scale" },
1732  { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SQRT}, 0, 0, FLAGS, "scale" },
1733  { "cbrt", "cubic root", 0, AV_OPT_TYPE_CONST, {.i64=CBRT}, 0, 0, FLAGS, "scale" },
1734  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=LOG}, 0, 0, FLAGS, "scale" },
1735  { "4thrt","4th root", 0, AV_OPT_TYPE_CONST, {.i64=FOURTHRT}, 0, 0, FLAGS, "scale" },
1736  { "5thrt","5th root", 0, AV_OPT_TYPE_CONST, {.i64=FIFTHRT}, 0, 0, FLAGS, "scale" },
1737  { "fscale", "set frequency scale", OFFSET(fscale), AV_OPT_TYPE_INT, {.i64=F_LINEAR}, 0, NB_FSCALES-1, FLAGS, "fscale" },
1738  { "lin", "linear", 0, AV_OPT_TYPE_CONST, {.i64=F_LINEAR}, 0, 0, FLAGS, "fscale" },
1739  { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=F_LOG}, 0, 0, FLAGS, "fscale" },
1740  { "saturation", "color saturation multiplier", OFFSET(saturation), AV_OPT_TYPE_FLOAT, {.dbl = 1}, -10, 10, FLAGS },
1741  WIN_FUNC_OPTION("win_func", OFFSET(win_func), FLAGS, WFUNC_HANNING),
1742  { "orientation", "set orientation", OFFSET(orientation), AV_OPT_TYPE_INT, {.i64=VERTICAL}, 0, NB_ORIENTATIONS-1, FLAGS, "orientation" },
1743  { "vertical", NULL, 0, AV_OPT_TYPE_CONST, {.i64=VERTICAL}, 0, 0, FLAGS, "orientation" },
1744  { "horizontal", NULL, 0, AV_OPT_TYPE_CONST, {.i64=HORIZONTAL}, 0, 0, FLAGS, "orientation" },
1745  { "gain", "set scale gain", OFFSET(gain), AV_OPT_TYPE_FLOAT, {.dbl = 1}, 0, 128, FLAGS },
1746  { "legend", "draw legend", OFFSET(legend), AV_OPT_TYPE_BOOL, {.i64 = 1}, 0, 1, FLAGS },
1747  { "rotation", "color rotation", OFFSET(rotation), AV_OPT_TYPE_FLOAT, {.dbl = 0}, -1, 1, FLAGS },
1748  { "start", "start frequency", OFFSET(start), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT32_MAX, FLAGS },
1749  { "stop", "stop frequency", OFFSET(stop), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT32_MAX, FLAGS },
1750  { "drange", "set dynamic range in dBFS", OFFSET(drange), AV_OPT_TYPE_FLOAT, {.dbl = 120}, 10, 200, FLAGS },
1751  { "limit", "set upper limit in dBFS", OFFSET(limit), AV_OPT_TYPE_FLOAT, {.dbl = 0}, -100, 100, FLAGS },
1752  { "opacity", "set opacity strength", OFFSET(opacity_factor), AV_OPT_TYPE_FLOAT, {.dbl = 1}, 0, 10, FLAGS },
1753  { NULL }
1754 };
1755 
1756 AVFILTER_DEFINE_CLASS(showspectrumpic);
1757 
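/* Single-picture path: filter_frame() below only buffers the input; when the
 * source reports EOF this request_frame callback spreads all buffered samples
 * over the output columns, averaging several FFT windows per column, and
 * emits exactly one picture. */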
1758 static int showspectrumpic_request_frame(AVFilterLink *outlink)
1759 {
1760  AVFilterContext *ctx = outlink->src;
1761  ShowSpectrumContext *s = ctx->priv;
1762  AVFilterLink *inlink = ctx->inputs[0];
1763  int ret;
1764 
1765  ret = ff_request_frame(inlink);
1766  if (ret == AVERROR_EOF && s->outpicref && s->samples > 0) {
1767  int consumed = 0;
1768  int x = 0, sz = s->orientation == VERTICAL ? s->w : s->h;
1769  unsigned int nb_frame = 0;
1770  int ch, spf, spb;
1771  int src_offset = 0;
1772  AVFrame *fin;
1773 
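 /* spf: samples fed to each FFT pass, spb: samples accumulated per output
  * column; both are derived from the total sample count and the number of
  * columns (sz). */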
1774  spf = s->win_size * (s->samples / ((s->win_size * sz) * ceil(s->samples / (float)(s->win_size * sz))));
1775  spf = FFMAX(1, spf);
1776  s->hop_size = spf;
1777 
1778  spb = (s->samples / (spf * sz)) * spf;
1779 
1780  fin = ff_get_audio_buffer(inlink, spf);
1781  if (!fin)
1782  return AVERROR(ENOMEM);
1783 
1784  while (x < sz) {
1785  int acc_samples = 0;
1786  int dst_offset = 0;
1787 
1788  while (nb_frame <= s->nb_frames) {
1789  AVFrame *cur_frame = s->frames[nb_frame];
1790  int cur_frame_samples = cur_frame->nb_samples;
1791  int nb_samples = 0;
1792 
1793  if (acc_samples < spf) {
1794  nb_samples = FFMIN(spf - acc_samples, cur_frame_samples - src_offset);
1795  acc_samples += nb_samples;
1796  av_samples_copy(fin->extended_data, cur_frame->extended_data,
1797  dst_offset, src_offset, nb_samples,
1798  cur_frame->ch_layout.nb_channels, AV_SAMPLE_FMT_FLTP);
1799  }
1800 
1801  src_offset += nb_samples;
1802  dst_offset += nb_samples;
1803  if (cur_frame_samples <= src_offset) {
1804  av_frame_free(&s->frames[nb_frame]);
1805  nb_frame++;
1806  src_offset = 0;
1807  }
1808 
1809  if (acc_samples == spf)
1810  break;
1811  }
1812 
1813  ff_filter_execute(ctx, run_channel_fft, fin, NULL, s->nb_display_channels);
1814  acalc_magnitudes(s);
1815 
1816  consumed += spf;
1817  if (consumed >= spb) {
1818  int h = s->orientation == VERTICAL ? s->h : s->w;
1819 
1820  scale_magnitudes(s, 1.f / (consumed / spf));
1821  plot_spectrum_column(inlink, fin);
1822  consumed = 0;
1823  x++;
1824  for (ch = 0; ch < s->nb_display_channels; ch++)
1825  memset(s->magnitudes[ch], 0, h * sizeof(float));
1826  }
1827  }
1828 
1829  av_frame_free(&fin);
1830  s->outpicref->pts = 0;
1831 
1832  if (s->legend)
1833  draw_legend(ctx, s->samples);
1834 
1835  ret = ff_filter_frame(outlink, s->outpicref);
1836  s->outpicref = NULL;
1837  }
1838 
1839  return ret;
1840 }
1841 
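/* Input pad callback for showspectrumpic: frames are only queued here (the
 * frames[] array grows via av_fast_realloc, doubling in size) and the sample
 * count is tracked; all analysis happens later in request_frame. */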
1842 static int showspectrumpic_filter_frame(AVFilterLink *inlink, AVFrame *insamples)
1843 {
1844  AVFilterContext *ctx = inlink->dst;
1845  ShowSpectrumContext *s = ctx->priv;
1846  void *ptr;
1847 
1848  if (s->nb_frames + 1ULL > s->frames_size / sizeof(*(s->frames))) {
1849  ptr = av_fast_realloc(s->frames, &s->frames_size, s->frames_size * 2);
1850  if (!ptr)
1851  return AVERROR(ENOMEM);
1852  s->frames = ptr;
1853  }
1854 
1855  s->frames[s->nb_frames] = insamples;
1856  s->samples += insamples->nb_samples;
1857  s->nb_frames++;
1858 
1859  return 0;
1860 }
1861 
1862 static const AVFilterPad showspectrumpic_inputs[] = {
1863  {
1864  .name = "default",
1865  .type = AVMEDIA_TYPE_AUDIO,
1866  .filter_frame = showspectrumpic_filter_frame,
1867  },
1868 };
1869 
1870 static const AVFilterPad showspectrumpic_outputs[] = {
1871  {
1872  .name = "default",
1873  .type = AVMEDIA_TYPE_VIDEO,
1874  .config_props = config_output,
1875  .request_frame = showspectrumpic_request_frame,
1876  },
1877 };
1878 
1879 const AVFilter ff_avf_showspectrumpic = {
1880  .name = "showspectrumpic",
1881  .description = NULL_IF_CONFIG_SMALL("Convert input audio to a spectrum video output single picture."),
1882  .uninit = uninit,
1883  .priv_size = sizeof(ShowSpectrumContext),
1884  FILTER_INPUTS(showspectrumpic_inputs),
1885  FILTER_OUTPUTS(showspectrumpic_outputs),
1886  FILTER_QUERY_FUNC(query_formats),
1887  .priv_class = &showspectrumpic_class,
1888  .flags = AVFILTER_FLAG_SLICE_THREADS,
1889 };
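/* Example invocation (file names are placeholders):
 *   ffmpeg -i in.wav -lavfi showspectrumpic=s=1024x512 spectrum.png
 */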
1890 
1891 #endif // CONFIG_SHOWSPECTRUMPIC_FILTER