FFmpeg — avf_aphasemeter.c (documentation view of this source file)
1 /*
2  * Copyright (c) 2015 Paul B Mahol
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * audio to video multimedia aphasemeter filter
24  */
25 
27 #include "libavutil/intreadwrite.h"
28 #include "libavutil/opt.h"
29 #include "libavutil/parseutils.h"
30 #include "libavutil/timestamp.h"
31 #include "avfilter.h"
32 #include "formats.h"
33 #include "audio.h"
34 #include "video.h"
35 #include "internal.h"
36 #include "float.h"
37 
38 typedef struct AudioPhaseMeterContext {
39  const AVClass *class;
41  int do_video;
43  int w, h;
45  int contrast[4];
46  uint8_t *mpc_str;
47  uint8_t mpc[4];
49  int is_mono;
53  float tolerance;
54  float angle;
55  float phase;
57  int64_t duration;
58  int64_t frame_end;
59  int64_t mono_idx[2];
60  int64_t out_phase_idx[2];
62 
63 #define MAX_DURATION (24*60*60*1000000LL)
64 #define OFFSET(x) offsetof(AudioPhaseMeterContext, x)
65 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
66 #define get_duration(index) (index[1] - index[0])
67 
68 static const AVOption aphasemeter_options[] = {
69  { "rate", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, 0, INT_MAX, FLAGS },
70  { "r", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, 0, INT_MAX, FLAGS },
71  { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str="800x400"}, 0, 0, FLAGS },
72  { "s", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str="800x400"}, 0, 0, FLAGS },
73  { "rc", "set red contrast", OFFSET(contrast[0]), AV_OPT_TYPE_INT, {.i64=2}, 0, 255, FLAGS },
74  { "gc", "set green contrast", OFFSET(contrast[1]), AV_OPT_TYPE_INT, {.i64=7}, 0, 255, FLAGS },
75  { "bc", "set blue contrast", OFFSET(contrast[2]), AV_OPT_TYPE_INT, {.i64=1}, 0, 255, FLAGS },
76  { "mpc", "set median phase color", OFFSET(mpc_str), AV_OPT_TYPE_STRING, {.str = "none"}, 0, 0, FLAGS },
77  { "video", "set video output", OFFSET(do_video), AV_OPT_TYPE_BOOL, {.i64 = 1}, 0, 1, FLAGS },
78  { "phasing", "set mono and out-of-phase detection output", OFFSET(do_phasing_detection), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
79  { "tolerance", "set phase tolerance for mono detection", OFFSET(tolerance), AV_OPT_TYPE_FLOAT, {.dbl = 0.}, 0, 1, FLAGS },
80  { "t", "set phase tolerance for mono detection", OFFSET(tolerance), AV_OPT_TYPE_FLOAT, {.dbl = 0.}, 0, 1, FLAGS },
81  { "angle", "set angle threshold for out-of-phase detection", OFFSET(angle), AV_OPT_TYPE_FLOAT, {.dbl = 170.}, 90, 180, FLAGS },
82  { "a", "set angle threshold for out-of-phase detection", OFFSET(angle), AV_OPT_TYPE_FLOAT, {.dbl = 170.}, 90, 180, FLAGS },
83  { "duration", "set minimum mono or out-of-phase duration in seconds", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64=2000000}, 0, MAX_DURATION, FLAGS },
84  { "d", "set minimum mono or out-of-phase duration in seconds", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64=2000000}, 0, MAX_DURATION, FLAGS },
85  { NULL }
86 };
87 
88 AVFILTER_DEFINE_CLASS(aphasemeter);
89 
91 {
92  AudioPhaseMeterContext *s = ctx->priv;
95  AVFilterLink *inlink = ctx->inputs[0];
96  AVFilterLink *outlink = ctx->outputs[0];
98  static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_RGBA, AV_PIX_FMT_NONE };
99  int ret;
100 
102  if ((ret = ff_formats_ref (formats, &inlink->outcfg.formats )) < 0 ||
103  (ret = ff_formats_ref (formats, &outlink->incfg.formats )) < 0 ||
105  (ret = ff_channel_layouts_ref (layout , &inlink->outcfg.channel_layouts)) < 0 ||
106  (ret = ff_channel_layouts_ref (layout , &outlink->incfg.channel_layouts)) < 0)
107  return ret;
108 
110  if ((ret = ff_formats_ref(formats, &inlink->outcfg.samplerates)) < 0 ||
111  (ret = ff_formats_ref(formats, &outlink->incfg.samplerates)) < 0)
112  return ret;
113 
114  if (s->do_video) {
115  AVFilterLink *outlink = ctx->outputs[1];
116 
118  if ((ret = ff_formats_ref(formats, &outlink->incfg.formats)) < 0)
119  return ret;
120  }
121 
122  return 0;
123 }
124 
126 {
127  AVFilterContext *ctx = inlink->dst;
128  AudioPhaseMeterContext *s = ctx->priv;
129  int nb_samples;
130  s->duration = av_rescale(s->duration, inlink->sample_rate, AV_TIME_BASE);
131 
132  if (s->do_video) {
133  nb_samples = FFMAX(1, av_rescale(inlink->sample_rate, s->frame_rate.den, s->frame_rate.num));
134  inlink->min_samples =
135  inlink->max_samples = nb_samples;
136  }
137 
138  return 0;
139 }
140 
141 static int config_video_output(AVFilterLink *outlink)
142 {
143  AVFilterContext *ctx = outlink->src;
144  AudioPhaseMeterContext *s = ctx->priv;
145 
146  outlink->w = s->w;
147  outlink->h = s->h;
148  outlink->sample_aspect_ratio = (AVRational){1,1};
149  outlink->frame_rate = s->frame_rate;
150 
151  if (!strcmp(s->mpc_str, "none"))
152  s->draw_median_phase = 0;
153  else if (av_parse_color(s->mpc, s->mpc_str, -1, ctx) >= 0)
154  s->draw_median_phase = 1;
155  else
156  return AVERROR(EINVAL);
157 
158  return 0;
159 }
160 
/* Map a phase value in [-1, 1] to a horizontal pixel position in [0, w-1]. */
static inline int get_x(float phase, int w)
{
    const double normalized = (phase + 1.) / 2.;   /* [-1,1] -> [0,1] */
    return normalized * (w - 1);                   /* truncates toward zero */
}
165 
166 static inline void add_metadata(AVFrame *insamples, const char *key, char *value)
167 {
168  char buf[128];
169 
170  snprintf(buf, sizeof(buf), "lavfi.aphasemeter.%s", key);
171  av_dict_set(&insamples->metadata, buf, value, 0);
172 }
173 
174 static inline void update_mono_detection(AudioPhaseMeterContext *s, AVFrame *insamples, int mono_measurement)
175 {
176  int64_t mono_duration;
177  if (!s->is_mono && mono_measurement) {
178  s->is_mono = 1;
179  s->start_mono_presence = 1;
180  s->mono_idx[0] = insamples->pts;
181  }
182  if (s->is_mono && mono_measurement && s->start_mono_presence) {
183  s->mono_idx[1] = s->frame_end;
184  mono_duration = get_duration(s->mono_idx);
185  if (mono_duration >= s->duration) {
186  add_metadata(insamples, "mono_start", av_ts2timestr(s->mono_idx[0], &s->time_base));
187  av_log(s, AV_LOG_INFO, "mono_start: %s\n", av_ts2timestr(s->mono_idx[0], &s->time_base));
188  s->start_mono_presence = 0;
189  }
190  }
191  if (s->is_mono && !mono_measurement) {
192  s->mono_idx[1] = insamples ? insamples->pts : s->frame_end;
193  mono_duration = get_duration(s->mono_idx);
194  if (mono_duration >= s->duration) {
195  if (insamples) {
196  add_metadata(insamples, "mono_end", av_ts2timestr(s->mono_idx[1], &s->time_base));
197  add_metadata(insamples, "mono_duration", av_ts2timestr(mono_duration, &s->time_base));
198  }
199  av_log(s, AV_LOG_INFO, "mono_end: %s | mono_duration: %s\n", av_ts2timestr(s->mono_idx[1], &s->time_base), av_ts2timestr(mono_duration, &s->time_base));
200  }
201  s->is_mono = 0;
202  }
203 }
204 
205 static inline void update_out_phase_detection(AudioPhaseMeterContext *s, AVFrame *insamples, int out_phase_measurement)
206 {
207  int64_t out_phase_duration;
208  if (!s->is_out_phase && out_phase_measurement) {
209  s->is_out_phase = 1;
210  s->start_out_phase_presence = 1;
211  s->out_phase_idx[0] = insamples->pts;
212  }
213  if (s->is_out_phase && out_phase_measurement && s->start_out_phase_presence) {
214  s->out_phase_idx[1] = s->frame_end;
215  out_phase_duration = get_duration(s->out_phase_idx);
216  if (out_phase_duration >= s->duration) {
217  add_metadata(insamples, "out_phase_start", av_ts2timestr(s->out_phase_idx[0], &s->time_base));
218  av_log(s, AV_LOG_INFO, "out_phase_start: %s\n", av_ts2timestr(s->out_phase_idx[0], &s->time_base));
219  s->start_out_phase_presence = 0;
220  }
221  }
222  if (s->is_out_phase && !out_phase_measurement) {
223  s->out_phase_idx[1] = insamples ? insamples->pts : s->frame_end;
224  out_phase_duration = get_duration(s->out_phase_idx);
225  if (out_phase_duration >= s->duration) {
226  if (insamples) {
227  add_metadata(insamples, "out_phase_end", av_ts2timestr(s->out_phase_idx[1], &s->time_base));
228  add_metadata(insamples, "out_phase_duration", av_ts2timestr(out_phase_duration, &s->time_base));
229  }
230  av_log(s, AV_LOG_INFO, "out_phase_end: %s | out_phase_duration: %s\n", av_ts2timestr(s->out_phase_idx[1], &s->time_base), av_ts2timestr(out_phase_duration, &s->time_base));
231  }
232  s->is_out_phase = 0;
233  }
234 }
235 
237 {
238  AVFilterContext *ctx = inlink->dst;
239  AudioPhaseMeterContext *s = ctx->priv;
240  AVFilterLink *outlink = s->do_video ? ctx->outputs[1] : NULL;
241  AVFilterLink *aoutlink = ctx->outputs[0];
242  AVDictionary **metadata;
243  const int rc = s->contrast[0];
244  const int gc = s->contrast[1];
245  const int bc = s->contrast[2];
246  float fphase = 0;
247  AVFrame *out;
248  uint8_t *dst;
249  int i;
250  int mono_measurement;
251  int out_phase_measurement;
252  float tolerance = 1.0f - s->tolerance;
253  float angle = cosf(s->angle/180.0f*M_PI);
254 
255  if (s->do_video && (!s->out || s->out->width != outlink->w ||
256  s->out->height != outlink->h)) {
257  av_frame_free(&s->out);
258  s->out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
259  if (!s->out) {
260  av_frame_free(&in);
261  return AVERROR(ENOMEM);
262  }
263 
264  out = s->out;
265  for (i = 0; i < outlink->h; i++)
266  memset(out->data[0] + i * out->linesize[0], 0, outlink->w * 4);
267  } else if (s->do_video) {
268  out = s->out;
269  for (i = outlink->h - 1; i >= 10; i--)
270  memmove(out->data[0] + (i ) * out->linesize[0],
271  out->data[0] + (i-1) * out->linesize[0],
272  outlink->w * 4);
273  for (i = 0; i < outlink->w; i++)
274  AV_WL32(out->data[0] + i * 4, 0);
275  }
276 
277  for (i = 0; i < in->nb_samples; i++) {
278  const float *src = (float *)in->data[0] + i * 2;
279  const float f = src[0] * src[1] / (src[0]*src[0] + src[1] * src[1]) * 2;
280  const float phase = isnan(f) ? 1 : f;
281  const int x = get_x(phase, s->w);
282 
283  if (s->do_video) {
284  dst = out->data[0] + x * 4;
285  dst[0] = FFMIN(255, dst[0] + rc);
286  dst[1] = FFMIN(255, dst[1] + gc);
287  dst[2] = FFMIN(255, dst[2] + bc);
288  dst[3] = 255;
289  }
290  fphase += phase;
291  }
292  fphase /= in->nb_samples;
293  s->phase = fphase;
294 
295  if (s->do_video) {
296  if (s->draw_median_phase) {
297  dst = out->data[0] + get_x(fphase, s->w) * 4;
298  AV_WL32(dst, AV_RL32(s->mpc));
299  }
300 
301  for (i = 1; i < 10 && i < outlink->h; i++)
302  memcpy(out->data[0] + i * out->linesize[0], out->data[0], outlink->w * 4);
303  }
304 
305  metadata = &in->metadata;
306  if (metadata) {
307  uint8_t value[128];
308 
309  snprintf(value, sizeof(value), "%f", fphase);
310  add_metadata(in, "phase", value);
311  }
312 
313  if (s->do_phasing_detection) {
314  s->time_base = inlink->time_base;
315  s->frame_end = in->pts + av_rescale_q(in->nb_samples,
316  (AVRational){ 1, in->sample_rate }, inlink->time_base);
317 
318  mono_measurement = (tolerance - fphase) < FLT_EPSILON;
319  out_phase_measurement = (angle - fphase) > FLT_EPSILON;
320 
321  update_mono_detection(s, in, mono_measurement);
322  update_out_phase_detection(s, in, out_phase_measurement);
323  }
324 
325  if (s->do_video) {
326  AVFrame *clone;
327 
328  s->out->pts = in->pts;
329  clone = av_frame_clone(s->out);
330  if (!clone)
331  return AVERROR(ENOMEM);
332  ff_filter_frame(outlink, clone);
333  }
334  return ff_filter_frame(aoutlink, in);
335 }
336 
338 {
339  AudioPhaseMeterContext *s = ctx->priv;
340 
341  if (s->do_phasing_detection) {
344  }
345  av_frame_free(&s->out);
346 }
347 
349 {
350  AudioPhaseMeterContext *s = ctx->priv;
351  AVFilterPad pad;
352  int ret;
353 
354  pad = (AVFilterPad){
355  .name = "out0",
356  .type = AVMEDIA_TYPE_AUDIO,
357  };
358  ret = ff_append_outpad(ctx, &pad);
359  if (ret < 0)
360  return ret;
361 
362  if (s->do_video) {
363  pad = (AVFilterPad){
364  .name = "out1",
365  .type = AVMEDIA_TYPE_VIDEO,
366  .config_props = config_video_output,
367  };
368  ret = ff_append_outpad(ctx, &pad);
369  if (ret < 0)
370  return ret;
371  }
372 
373  return 0;
374 }
375 
376 static const AVFilterPad inputs[] = {
377  {
378  .name = "default",
379  .type = AVMEDIA_TYPE_AUDIO,
380  .config_props = config_input,
381  .filter_frame = filter_frame,
382  },
383 };
384 
386  .name = "aphasemeter",
387  .description = NULL_IF_CONFIG_SMALL("Convert input audio to phase meter video output."),
388  .init = init,
389  .uninit = uninit,
390  .priv_size = sizeof(AudioPhaseMeterContext),
392  .outputs = NULL,
394  .priv_class = &aphasemeter_class,
396 };
formats
formats
Definition: signature.h:48
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:98
inputs
static const AVFilterPad inputs[]
Definition: avf_aphasemeter.c:376
config_video_output
static int config_video_output(AVFilterLink *outlink)
Definition: avf_aphasemeter.c:141
AudioPhaseMeterContext::start_mono_presence
int start_mono_presence
Definition: avf_aphasemeter.c:51
AVFilterChannelLayouts
A list of supported channel layouts.
Definition: formats.h:85
AudioPhaseMeterContext::duration
int64_t duration
Definition: avf_aphasemeter.c:57
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
AVFilterFormatsConfig::samplerates
AVFilterFormats * samplerates
Lists of supported sample rates, only for audio.
Definition: avfilter.h:495
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:381
AV_WL32
#define AV_WL32(p, v)
Definition: intreadwrite.h:426
AVFilterFormatsConfig::channel_layouts
AVFilterChannelLayouts * channel_layouts
Lists of supported channel layouts, only for audio.
Definition: avfilter.h:500
out
FILE * out
Definition: movenc.c:54
init
static av_cold int init(AVFilterContext *ctx)
Definition: avf_aphasemeter.c:348
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1018
sample_fmts
static enum AVSampleFormat sample_fmts[]
Definition: adpcmenc.c:948
ff_channel_layouts_ref
int ff_channel_layouts_ref(AVFilterChannelLayouts *f, AVFilterChannelLayouts **ref)
Add *ref as a new reference to f.
Definition: formats.c:550
av_parse_color
int av_parse_color(uint8_t *rgba_color, const char *color_string, int slen, void *log_ctx)
Put the RGBA values that correspond to color_string in rgba_color.
Definition: parseutils.c:356
AV_OPT_TYPE_VIDEO_RATE
@ AV_OPT_TYPE_VIDEO_RATE
offset must point to AVRational
Definition: opt.h:237
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:112
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:310
config_input
static int config_input(AVFilterLink *inlink)
Definition: avf_aphasemeter.c:125
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:415
w
uint8_t w
Definition: llviddspenc.c:38
AudioPhaseMeterContext::w
int w
Definition: avf_aphasemeter.c:43
AVOption
AVOption.
Definition: opt.h:247
FILTER_QUERY_FUNC
#define FILTER_QUERY_FUNC(func)
Definition: internal.h:168
AV_OPT_TYPE_DURATION
@ AV_OPT_TYPE_DURATION
Definition: opt.h:238
FLAGS
#define FLAGS
Definition: avf_aphasemeter.c:65
float.h
AVDictionary
Definition: dict.c:30
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:153
video.h
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:329
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
AudioPhaseMeterContext::start_out_phase_presence
int start_out_phase_presence
Definition: avf_aphasemeter.c:52
cosf
#define cosf(x)
Definition: libm.h:78
AV_CH_LAYOUT_STEREO
#define AV_CH_LAYOUT_STEREO
Definition: channel_layout.h:91
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:50
AudioPhaseMeterContext::is_out_phase
int is_out_phase
Definition: avf_aphasemeter.c:50
update_out_phase_detection
static void update_out_phase_detection(AudioPhaseMeterContext *s, AVFrame *insamples, int out_phase_measurement)
Definition: avf_aphasemeter.c:205
av_cold
#define av_cold
Definition: attributes.h:90
duration
int64_t duration
Definition: movenc.c:64
ff_add_channel_layout
int ff_add_channel_layout(AVFilterChannelLayouts **l, uint64_t channel_layout)
Definition: formats.c:426
AudioPhaseMeterContext::out
AVFrame * out
Definition: avf_aphasemeter.c:40
intreadwrite.h
filter_frame
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: avf_aphasemeter.c:236
s
#define s(width, name)
Definition: cbs_vp9.c:257
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:202
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:555
AudioPhaseMeterContext::frame_rate
AVRational frame_rate
Definition: avf_aphasemeter.c:44
AudioPhaseMeterContext::mono_idx
int64_t mono_idx[2]
Definition: avf_aphasemeter.c:59
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:290
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: avf_aphasemeter.c:90
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:424
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:141
key
const char * key
Definition: hwcontext_opencl.c:168
AudioPhaseMeterContext::h
int h
Definition: avf_aphasemeter.c:43
f
#define f(width, name)
Definition: cbs_vp9.c:255
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:191
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
if
if(ret)
Definition: filter_design.txt:179
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
isnan
#define isnan(x)
Definition: libm.h:340
aphasemeter_options
static const AVOption aphasemeter_options[]
Definition: avf_aphasemeter.c:68
AV_OPT_TYPE_IMAGE_SIZE
@ AV_OPT_TYPE_IMAGE_SIZE
offset must point to two consecutive integers
Definition: opt.h:234
src
#define src
Definition: vp8dsp.c:255
parseutils.h
AudioPhaseMeterContext::do_phasing_detection
int do_phasing_detection
Definition: avf_aphasemeter.c:42
update_mono_detection
static void update_mono_detection(AudioPhaseMeterContext *s, AVFrame *insamples, int mono_measurement)
Definition: avf_aphasemeter.c:174
AudioPhaseMeterContext::frame_end
int64_t frame_end
Definition: avf_aphasemeter.c:58
AudioPhaseMeterContext::do_video
int do_video
Definition: avf_aphasemeter.c:41
AudioPhaseMeterContext
Definition: avf_aphasemeter.c:38
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(aphasemeter)
AVFILTER_FLAG_DYNAMIC_OUTPUTS
#define AVFILTER_FLAG_DYNAMIC_OUTPUTS
The number of the filter outputs is not determined just by AVFilter.outputs.
Definition: avfilter.h:116
MAX_DURATION
#define MAX_DURATION
Definition: avf_aphasemeter.c:63
get_x
static int get_x(float phase, int w)
Definition: avf_aphasemeter.c:161
av_ts2timestr
#define av_ts2timestr(ts, tb)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: timestamp.h:76
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
AV_SAMPLE_FMT_NONE
@ AV_SAMPLE_FMT_NONE
Definition: samplefmt.h:59
AudioPhaseMeterContext::mpc_str
uint8_t * mpc_str
Definition: avf_aphasemeter.c:46
AudioPhaseMeterContext::is_mono
int is_mono
Definition: avf_aphasemeter.c:49
OFFSET
#define OFFSET(x)
Definition: avf_aphasemeter.c:64
M_PI
#define M_PI
Definition: mathematics.h:52
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:191
internal.h
AV_OPT_TYPE_FLOAT
@ AV_OPT_TYPE_FLOAT
Definition: opt.h:227
layout
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel layout
Definition: filter_design.txt:18
AudioPhaseMeterContext::phase
float phase
Definition: avf_aphasemeter.c:55
AVFrame::nb_samples
int nb_samples
number of audio samples (per channel) described by this frame
Definition: frame.h:388
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:271
AV_TIME_BASE
#define AV_TIME_BASE
Internal time base represented as integer.
Definition: avutil.h:254
AVSampleFormat
AVSampleFormat
Audio sample formats.
Definition: samplefmt.h:58
ff_avf_aphasemeter
const AVFilter ff_avf_aphasemeter
Definition: avf_aphasemeter.c:385
value
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default value
Definition: writing_filters.txt:86
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AudioPhaseMeterContext::mpc
uint8_t mpc[4]
Definition: avf_aphasemeter.c:47
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:56
av_rescale
int64_t av_rescale(int64_t a, int64_t b, int64_t c)
Rescale a 64-bit integer with rounding to nearest.
Definition: mathematics.c:128
AudioPhaseMeterContext::contrast
int contrast[4]
Definition: avf_aphasemeter.c:45
AudioPhaseMeterContext::time_base
AVRational time_base
Definition: avf_aphasemeter.c:56
AVFilter
Filter definition.
Definition: avfilter.h:149
ret
ret
Definition: filter_design.txt:187
AV_RL32
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_RL32
Definition: bytestream.h:92
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: avf_aphasemeter.c:337
ff_all_samplerates
AVFilterFormats * ff_all_samplerates(void)
Definition: formats.c:510
channel_layout.h
AudioPhaseMeterContext::draw_median_phase
int draw_median_phase
Definition: avf_aphasemeter.c:48
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:224
avfilter.h
AVFrame::metadata
AVDictionary * metadata
metadata.
Definition: frame.h:591
add_metadata
static void add_metadata(AVFrame *insamples, const char *key, char *value)
Definition: avf_aphasemeter.c:166
AVFilterContext
An instance of a filter.
Definition: avfilter.h:386
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
audio.h
get_duration
#define get_duration(index)
Definition: avf_aphasemeter.c:66
AVFilterFormatsConfig::formats
AVFilterFormats * formats
List of supported formats (pixel or sample).
Definition: avfilter.h:490
AudioPhaseMeterContext::tolerance
float tolerance
Definition: avf_aphasemeter.c:53
ff_append_outpad
int ff_append_outpad(AVFilterContext *f, AVFilterPad *p)
Definition: avfilter.c:150
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:241
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
timestamp.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
AudioPhaseMeterContext::angle
float angle
Definition: avf_aphasemeter.c:54
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:228
snprintf
#define snprintf
Definition: snprintf.h:34
AV_SAMPLE_FMT_FLT
@ AV_SAMPLE_FMT_FLT
float
Definition: samplefmt.h:63
AudioPhaseMeterContext::out_phase_idx
int64_t out_phase_idx[2]
Definition: avf_aphasemeter.c:60