/*
 * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
 * Copyright (c) 2014 Clément Bœsch
 * Copyright (c) 2014 Dave Rice @dericed
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "internal.h"

enum FilterMode {
    FILTER_NONE = -1,
    FILTER_TOUT,
    FILTER_VREP,
    FILTER_BRNG,
    FILT_NUMB
};

typedef struct SignalstatsContext {
    const AVClass *class;
    int chromah;    // height of chroma plane
    int chromaw;    // width of chroma plane
    int hsub;       // horizontal subsampling
    int vsub;       // vertical subsampling
    int depth;      // pixel depth
    int fs;         // pixel count per frame
    int cfs;        // pixel count per frame of chroma planes
    int outfilter;  // FilterMode
    int filters;
    AVFrame *frame_prev;
    uint8_t rgba_color[4];
    int yuv_color[3];
    int nb_jobs;
    int *jobs_rets;

    int *histy, *histu, *histv, *histsat;

    AVFrame *frame_sat;
    AVFrame *frame_hue;
} SignalstatsContext;

typedef struct ThreadData {
    const AVFrame *in;
    AVFrame *out;
} ThreadData;

typedef struct ThreadDataHueSatMetrics {
    const AVFrame *src;
    AVFrame *dst_sat, *dst_hue;
} ThreadDataHueSatMetrics;

#define OFFSET(x) offsetof(SignalstatsContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption signalstats_options[] = {
    {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "filters"},
        {"tout", "analyze pixels for temporal outliers",             0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, "filters"},
        {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, "filters"},
        {"brng", "analyze for pixels outside of broadcast range",    0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, "filters"},
    {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, "out"},
        {"tout", "highlight pixels that depict temporal outliers",             0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, "out"},
        {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, "out"},
        {"brng", "highlight pixels that are outside of broadcast range",       0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, "out"},
    {"c",     "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {NULL}
};

AVFILTER_DEFINE_CLASS(signalstats);
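
/*
 * Illustrative usage (our sketch, not part of the original source): run all
 * three analyses, burn out-of-range pixels into the output in red, and
 * discard the video:
 *
 *   ffmpeg -i input.mov -vf signalstats=stat=tout+vrep+brng:out=brng:color=red -f null -
 *
 * Each frame's measurements are exported as lavfi.signalstats.* frame
 * metadata (YMIN, YAVG, SATMAX, ...), set further down in filter_frame8/16.
 */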

static av_cold int init(AVFilterContext *ctx)
{
    uint8_t r, g, b;
    SignalstatsContext *s = ctx->priv;

    if (s->outfilter != FILTER_NONE)
        s->filters |= 1 << s->outfilter;

    r = s->rgba_color[0];
    g = s->rgba_color[1];
    b = s->rgba_color[2];
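    /*
     * Fixed-point BT.601 RGB -> limited-range YUV; the (1<<7) term rounds
     * before the >>8. A worked example (our arithmetic, not in the source):
     * for white, r = g = b = 255, Y = (((66+129+25)*255 + 128) >> 8) + 16
     * = 219 + 16 = 235, while the U and V coefficient sums are 0, so both
     * chroma channels come out at the neutral value 128.
     */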
    s->yuv_color[0] = (( 66*r + 129*g +  25*b + (1<<7)) >> 8) +  16;
    s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
    s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    SignalstatsContext *s = ctx->priv;
    av_frame_free(&s->frame_prev);
    av_frame_free(&s->frame_sat);
    av_frame_free(&s->frame_hue);
    av_freep(&s->jobs_rets);
    av_freep(&s->histy);
    av_freep(&s->histu);
    av_freep(&s->histv);
    av_freep(&s->histsat);
}

static int query_formats(AVFilterContext *ctx)
{
    // TODO: add more
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_YUV440P,
        AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ411P,
        AV_PIX_FMT_YUVJ440P,
        AV_PIX_FMT_GRAY8,
        AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV420P9,
        AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,
        AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,
        AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV420P14,
        AV_PIX_FMT_YUV444P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
{
    AVFrame *frame = av_frame_alloc();
    if (!frame)
        return NULL;

    frame->format = pixfmt;
    frame->width  = w;
    frame->height = h;

    if (av_frame_get_buffer(frame, 32) < 0) {
        av_frame_free(&frame);
        return NULL;
    }

    return frame;
}

static int config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *inlink = outlink->src->inputs[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    s->hsub = desc->log2_chroma_w;
    s->vsub = desc->log2_chroma_h;
    s->depth = desc->comp[0].depth;
    if (s->depth > 8) {
        s->histy   = av_malloc_array(1 << s->depth, sizeof(*s->histy));
        s->histu   = av_malloc_array(1 << s->depth, sizeof(*s->histu));
        s->histv   = av_malloc_array(1 << s->depth, sizeof(*s->histv));
        s->histsat = av_malloc_array(1 << s->depth, sizeof(*s->histsat));

        if (!s->histy || !s->histu || !s->histv || !s->histsat)
            return AVERROR(ENOMEM);
    }

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
    s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);

    s->fs = inlink->w * inlink->h;
    s->cfs = s->chromaw * s->chromah;

    s->nb_jobs   = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
    s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
    if (!s->jobs_rets)
        return AVERROR(ENOMEM);

    s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
    s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
    if (!s->frame_sat || !s->frame_hue)
        return AVERROR(ENOMEM);

    return 0;
}

static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
{
    const int chromax = x >> s->hsub;
    const int chromay = y >> s->vsub;
    f->data[0][y       * f->linesize[0] + x]       = s->yuv_color[0];
    f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
    f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
}

static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
{
    const int chromax = x >> s->hsub;
    const int chromay = y >> s->vsub;
    const int mult = 1 << (s->depth - 8);
    AV_WN16(f->data[0] + y       * f->linesize[0] + x       * 2, s->yuv_color[0] * mult);
    AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
    AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
}

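/*
 * BRNG: count (and optionally highlight) samples outside the nominal
 * broadcast range, i.e. luma outside [16, 235] and chroma outside [16, 240]
 * on an 8-bit scale (scaled by 1 << (depth - 8) in the 16-bit variant).
 */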
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int yc = y >> s->vsub;
        const uint8_t *pluma    = &in->data[0][y  * in->linesize[0]];
        const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
        const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];

        for (x = 0; x < w; x++) {
            const int xc = x >> s->hsub;
            const int luma    = pluma[x];
            const int chromau = pchromau[xc];
            const int chromav = pchromav[xc];
            const int filt = luma    < 16 || luma    > 235 ||
                             chromau < 16 || chromau > 240 ||
                             chromav < 16 || chromav > 240;
            score += filt;
            if (out && filt)
                burn_frame8(s, out, x, y);
        }
    }
    return score;
}

static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int mult = 1 << (s->depth - 8);
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int yc = y >> s->vsub;
        const uint16_t *pluma    = (uint16_t *)&in->data[0][y  * in->linesize[0]];
        const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
        const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];

        for (x = 0; x < w; x++) {
            const int xc = x >> s->hsub;
            const int luma    = pluma[x];
            const int chromau = pchromau[xc];
            const int chromav = pchromav[xc];
            const int filt = luma    < 16 * mult || luma    > 235 * mult ||
                             chromau < 16 * mult || chromau > 240 * mult ||
                             chromav < 16 * mult || chromav > 240 * mult;
            score += filt;
            if (out && filt)
                burn_frame16(s, out, x, y);
        }
    }
    return score;
}

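/*
 * A pixel is a temporal outlier when it differs from both vertical
 * neighbours while the neighbours agree with each other. A quick worked
 * example (ours): for x = z = 100, y = 120 the expression is
 * (20 + 20) / 2 - 0 = 20 > 4, an outlier; for a smooth gradient
 * x = 100, y = 110, z = 120 it is (10 + 10) / 2 - 20 = -10, not an outlier.
 */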
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
{
    return ((abs(x - y) + abs(z - y)) / 2) - abs(z - x) > 4; // make 4 configurable?
}

static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    int lw = in->linesize[0];
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that the video format is in fact interlaced.

#define FILTER(i, j) \
        filter_tout_outlier(p[(y-j) * lw + x + i], \
                            p[ y    * lw + x + i], \
                            p[(y+j) * lw + x + i])

#define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))

        if (y - 2 >= 0 && y + 2 < h) {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        } else {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        }
    }
    return score;
}

static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    int lw = in->linesize[0] / 2;
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that the video format is in fact interlaced.

        if (y - 2 >= 0 && y + 2 < h) {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        } else {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        }
    }
    return score;
}

#define VREP_START 4

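/*
 * VREP: a video line is flagged as a repetition when the sum of absolute
 * differences against the line VREP_START rows above is below the line
 * width, i.e. the average per-pixel difference is less than 1.
 */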
static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    const int lw = in->linesize[0];
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int y2lw = (y - VREP_START) * lw;
        const int ylw  = y * lw;
        int filt, totdiff = 0;

        if (y < VREP_START)
            continue;

        for (x = 0; x < w; x++)
            totdiff += abs(p[y2lw + x] - p[ylw + x]);
        filt = totdiff < w;

        score += filt;
        if (filt && out)
            for (x = 0; x < w; x++)
                burn_frame8(s, out, x, y);
    }
    return score * w;
}

static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    const int lw = in->linesize[0] / 2;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int y2lw = (y - VREP_START) * lw;
        const int ylw  = y * lw;
        int64_t totdiff = 0;
        int filt;

        if (y < VREP_START)
            continue;

        for (x = 0; x < w; x++)
            totdiff += abs(p[y2lw + x] - p[ylw + x]);
        filt = totdiff < w;

        score += filt;
        if (filt && out)
            for (x = 0; x < w; x++)
                burn_frame16(s, out, x, y);
    }
    return score * w;
}

static const struct {
    const char *name;
    int (*process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
    int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} filters_def[] = {
    {"TOUT", filter8_tout, filter16_tout},
    {"VREP", filter8_vrep, filter16_vrep},
    {"BRNG", filter8_brng, filter16_brng},
    {NULL}
};

#define DEPTH 256

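/*
 * The two helpers below fill per-pixel saturation and hue planes from the
 * chroma planes: saturation is the euclidean distance of (U, V) from the
 * neutral point (128 at 8 bits, 1 << (depth - 1) otherwise), and hue is
 * atan2 of the chroma offsets mapped into the 0..360 degree range.
 */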
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    ThreadDataHueSatMetrics *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1];
    const int lsz_v = src->linesize[2];
    const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
    const uint8_t *p_v = src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0];
    const int lsz_hue = dst_hue->linesize[0];
    uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
    uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypot(yuvu - 128, yuvv - 128); // int or round?
            ((int16_t*)p_hue)[i] = floor((180 / M_PI) * atan2f(yuvu-128, yuvv-128) + 180);
        }
        p_u   += lsz_u;
        p_v   += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}

static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    ThreadDataHueSatMetrics *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;
    const int mid = 1 << (s->depth - 1);

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1] / 2;
    const int lsz_v = src->linesize[2] / 2;
    const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
    const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0] / 2;
    const int lsz_hue = dst_hue->linesize[0] / 2;
    uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
    uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypot(yuvu - mid, yuvv - mid); // int or round?
            ((int16_t*)p_hue)[i] = floor((180 / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180);
        }
        p_u   += lsz_u;
        p_v   += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}

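/*
 * masky/masku/maskv below are the bitwise OR of every sample in a plane, so
 * av_popcount(mask) estimates the effective bit depth: e.g. 8-bit content
 * carried in a wider container leaves its low bits permanently zero and
 * reports 8 rather than the container width.
 */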
static unsigned compute_bit_depth(uint16_t mask)
{
    return av_popcount(mask);
}

static int filter_frame8(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;
    int i, j;
    int  w = 0,  cw = 0, // in
        pw = 0, cpw = 0; // prev
    int fil;
    char metabuf[128];
    unsigned int histy[DEPTH] = {0},
                 histu[DEPTH] = {0},
                 histv[DEPTH] = {0},
                 histhue[360] = {0},
                 histsat[DEPTH] = {0}; // limited to 8 bit data.
    int miny  = -1, minu  = -1, minv  = -1;
    int maxy  = -1, maxu  = -1, maxv  = -1;
    int lowy  = -1, lowu  = -1, lowv  = -1;
    int highy = -1, highu = -1, highv = -1;
    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
    int lowp, highp, clowp, chighp;
    int accy, accu, accv;
    int accsat, acchue = 0;
    int medhue, maxhue;
    int toty = 0, totu = 0, totv = 0, totsat = 0;
    int tothue = 0;
    int dify = 0, difu = 0, difv = 0;
    uint16_t masky = 0, masku = 0, maskv = 0;

    int filtot[FILT_NUMB] = {0};
    AVFrame *prev;

    AVFrame *sat = s->frame_sat;
    AVFrame *hue = s->frame_hue;
    const uint8_t *p_sat = sat->data[0];
    const uint8_t *p_hue = hue->data[0];
    const int lsz_sat = sat->linesize[0];
    const int lsz_hue = hue->linesize[0];
    ThreadDataHueSatMetrics td_huesat = {
        .src     = in,
        .dst_sat = sat,
        .dst_hue = hue,
    };

    if (!s->frame_prev)
        s->frame_prev = av_frame_clone(in);

    prev = s->frame_prev;

    if (s->outfilter != FILTER_NONE) {
        out = av_frame_clone(in);
        av_frame_make_writable(out);
    }

    ctx->internal->execute(ctx, compute_sat_hue_metrics8, &td_huesat,
                           NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));

    // Calculate luma histogram and difference with previous frame or field.
    for (j = 0; j < link->h; j++) {
        for (i = 0; i < link->w; i++) {
            const int yuv = in->data[0][w + i];

            masky |= yuv;
            histy[yuv]++;
            dify += abs(yuv - prev->data[0][pw + i]);
        }
        w  += in->linesize[0];
        pw += prev->linesize[0];
    }

    // Calculate chroma histogram and difference with previous frame or field.
    for (j = 0; j < s->chromah; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = in->data[1][cw+i];
            const int yuvv = in->data[2][cw+i];

            masku |= yuvu;
            maskv |= yuvv;
            histu[yuvu]++;
            difu += abs(yuvu - prev->data[1][cpw+i]);
            histv[yuvv]++;
            difv += abs(yuvv - prev->data[2][cpw+i]);

            histsat[p_sat[i]]++;
            histhue[((int16_t*)p_hue)[i]]++;
        }
        cw  += in->linesize[1];
        cpw += prev->linesize[1];
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            ThreadData td = {
                .in  = in,
                .out = out != in && s->outfilter == fil ? out : NULL,
            };
            memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
            ctx->internal->execute(ctx, filters_def[fil].process8,
                                   &td, s->jobs_rets, s->nb_jobs);
            for (i = 0; i < s->nb_jobs; i++)
                filtot[fil] += s->jobs_rets[i];
        }
    }

    // find low / high based on histogram percentile
    // these only need to be calculated once.

    lowp   = lrint(s->fs  * 10 / 100.);
    highp  = lrint(s->fs  * 90 / 100.);
    clowp  = lrint(s->cfs * 10 / 100.);
    chighp = lrint(s->cfs * 90 / 100.);

    accy = accu = accv = accsat = 0;
    for (fil = 0; fil < DEPTH; fil++) {
        if (miny   < 0 && histy[fil])   miny   = fil;
        if (minu   < 0 && histu[fil])   minu   = fil;
        if (minv   < 0 && histv[fil])   minv   = fil;
        if (minsat < 0 && histsat[fil]) minsat = fil;

        if (histy[fil])   maxy   = fil;
        if (histu[fil])   maxu   = fil;
        if (histv[fil])   maxv   = fil;
        if (histsat[fil]) maxsat = fil;

        toty   += histy[fil]   * fil;
        totu   += histu[fil]   * fil;
        totv   += histv[fil]   * fil;
        totsat += histsat[fil] * fil;

        accy   += histy[fil];
        accu   += histu[fil];
        accv   += histv[fil];
        accsat += histsat[fil];

        if (lowy   == -1 && accy   >= lowp)  lowy   = fil;
        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
        if (lowsat == -1 && accsat >= clowp) lowsat = fil;

        if (highy   == -1 && accy   >= highp)  highy   = fil;
        if (highu   == -1 && accu   >= chighp) highu   = fil;
        if (highv   == -1 && accv   >= chighp) highv   = fil;
        if (highsat == -1 && accsat >= chighp) highsat = fil;
    }

    maxhue = histhue[0];
    medhue = -1;
    for (fil = 0; fil < 360; fil++) {
        tothue += histhue[fil] * fil;
        acchue += histhue[fil];

        if (medhue == -1 && acchue > s->cfs / 2)
            medhue = fil;
        if (histhue[fil] > maxhue) {
            maxhue = histhue[fil];
        }
    }

    av_frame_free(&s->frame_prev);
    s->frame_prev = av_frame_clone(in);

#define SET_META(key, fmt, val) do {                                   \
    snprintf(metabuf, sizeof(metabuf), fmt, val);                      \
    av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0); \
} while (0)

    SET_META("YMIN",    "%d", miny);
    SET_META("YLOW",    "%d", lowy);
    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
    SET_META("YHIGH",   "%d", highy);
    SET_META("YMAX",    "%d", maxy);

    SET_META("UMIN",    "%d", minu);
    SET_META("ULOW",    "%d", lowu);
    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
    SET_META("UHIGH",   "%d", highu);
    SET_META("UMAX",    "%d", maxu);

    SET_META("VMIN",    "%d", minv);
    SET_META("VLOW",    "%d", lowv);
    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
    SET_META("VHIGH",   "%d", highv);
    SET_META("VMAX",    "%d", maxv);

    SET_META("SATMIN",  "%d", minsat);
    SET_META("SATLOW",  "%d", lowsat);
    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
    SET_META("SATHIGH", "%d", highsat);
    SET_META("SATMAX",  "%d", maxsat);

    SET_META("HUEMED",  "%d", medhue);
    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);

    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);

    SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
    SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
    SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            char metaname[128];
            snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
            av_dict_set(&out->metadata, metaname, metabuf, 0);
        }
    }

    if (in != out)
        av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

static int filter_frame16(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;
    int i, j;
    int  w = 0,  cw = 0, // in
        pw = 0, cpw = 0; // prev
    int fil;
    char metabuf[128];
    unsigned int *histy = s->histy,
                 *histu = s->histu,
                 *histv = s->histv,
                  histhue[360] = {0},
                 *histsat = s->histsat;
    int miny  = -1, minu  = -1, minv  = -1;
    int maxy  = -1, maxu  = -1, maxv  = -1;
    int lowy  = -1, lowu  = -1, lowv  = -1;
    int highy = -1, highu = -1, highv = -1;
    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
    int lowp, highp, clowp, chighp;
    int accy, accu, accv;
    int accsat, acchue = 0;
    int medhue, maxhue;
    int64_t toty = 0, totu = 0, totv = 0, totsat = 0;
    int64_t tothue = 0;
    int64_t dify = 0, difu = 0, difv = 0;
    uint16_t masky = 0, masku = 0, maskv = 0;

    int filtot[FILT_NUMB] = {0};
    AVFrame *prev;

    AVFrame *sat = s->frame_sat;
    AVFrame *hue = s->frame_hue;
    const uint16_t *p_sat = (uint16_t *)sat->data[0];
    const uint16_t *p_hue = (uint16_t *)hue->data[0];
    const int lsz_sat = sat->linesize[0] / 2;
    const int lsz_hue = hue->linesize[0] / 2;
    ThreadDataHueSatMetrics td_huesat = {
        .src     = in,
        .dst_sat = sat,
        .dst_hue = hue,
    };

    if (!s->frame_prev)
        s->frame_prev = av_frame_clone(in);

    prev = s->frame_prev;

    if (s->outfilter != FILTER_NONE) {
        out = av_frame_clone(in);
        av_frame_make_writable(out);
    }

    ctx->internal->execute(ctx, compute_sat_hue_metrics16, &td_huesat,
                           NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));

    // Calculate luma histogram and difference with previous frame or field.
    memset(s->histy, 0, (1 << s->depth) * sizeof(*s->histy));
    for (j = 0; j < link->h; j++) {
        for (i = 0; i < link->w; i++) {
            const int yuv = AV_RN16(in->data[0] + w + i * 2);

            masky |= yuv;
            histy[yuv]++;
            dify += abs(yuv - (int)AV_RN16(prev->data[0] + pw + i * 2));
        }
        w  += in->linesize[0];
        pw += prev->linesize[0];
    }

    // Calculate chroma histogram and difference with previous frame or field.
    memset(s->histu, 0, (1 << s->depth) * sizeof(*s->histu));
    memset(s->histv, 0, (1 << s->depth) * sizeof(*s->histv));
    memset(s->histsat, 0, (1 << s->depth) * sizeof(*s->histsat));
    for (j = 0; j < s->chromah; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
            const int yuvv = AV_RN16(in->data[2] + cw + i * 2);

            masku |= yuvu;
            maskv |= yuvv;
            histu[yuvu]++;
            difu += abs(yuvu - (int)AV_RN16(prev->data[1] + cpw + i * 2));
            histv[yuvv]++;
            difv += abs(yuvv - (int)AV_RN16(prev->data[2] + cpw + i * 2));

            histsat[p_sat[i]]++;
            histhue[((int16_t*)p_hue)[i]]++;
        }
        cw  += in->linesize[1];
        cpw += prev->linesize[1];
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            ThreadData td = {
                .in  = in,
                .out = out != in && s->outfilter == fil ? out : NULL,
            };
            memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
            ctx->internal->execute(ctx, filters_def[fil].process16,
                                   &td, s->jobs_rets, s->nb_jobs);
            for (i = 0; i < s->nb_jobs; i++)
                filtot[fil] += s->jobs_rets[i];
        }
    }

    // find low / high based on histogram percentile
    // these only need to be calculated once.

    lowp   = lrint(s->fs  * 10 / 100.);
    highp  = lrint(s->fs  * 90 / 100.);
    clowp  = lrint(s->cfs * 10 / 100.);
    chighp = lrint(s->cfs * 90 / 100.);

    accy = accu = accv = accsat = 0;
    for (fil = 0; fil < 1 << s->depth; fil++) {
        if (miny   < 0 && histy[fil])   miny   = fil;
        if (minu   < 0 && histu[fil])   minu   = fil;
        if (minv   < 0 && histv[fil])   minv   = fil;
        if (minsat < 0 && histsat[fil]) minsat = fil;

        if (histy[fil])   maxy   = fil;
        if (histu[fil])   maxu   = fil;
        if (histv[fil])   maxv   = fil;
        if (histsat[fil]) maxsat = fil;

        toty   += histy[fil]   * fil;
        totu   += histu[fil]   * fil;
        totv   += histv[fil]   * fil;
        totsat += histsat[fil] * fil;

        accy   += histy[fil];
        accu   += histu[fil];
        accv   += histv[fil];
        accsat += histsat[fil];

        if (lowy   == -1 && accy   >= lowp)  lowy   = fil;
        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
        if (lowsat == -1 && accsat >= clowp) lowsat = fil;

        if (highy   == -1 && accy   >= highp)  highy   = fil;
        if (highu   == -1 && accu   >= chighp) highu   = fil;
        if (highv   == -1 && accv   >= chighp) highv   = fil;
        if (highsat == -1 && accsat >= chighp) highsat = fil;
    }

    maxhue = histhue[0];
    medhue = -1;
    for (fil = 0; fil < 360; fil++) {
        tothue += histhue[fil] * fil;
        acchue += histhue[fil];

        if (medhue == -1 && acchue > s->cfs / 2)
            medhue = fil;
        if (histhue[fil] > maxhue) {
            maxhue = histhue[fil];
        }
    }

    av_frame_free(&s->frame_prev);
    s->frame_prev = av_frame_clone(in);

    SET_META("YMIN",    "%d", miny);
    SET_META("YLOW",    "%d", lowy);
    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
    SET_META("YHIGH",   "%d", highy);
    SET_META("YMAX",    "%d", maxy);

    SET_META("UMIN",    "%d", minu);
    SET_META("ULOW",    "%d", lowu);
    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
    SET_META("UHIGH",   "%d", highu);
    SET_META("UMAX",    "%d", maxu);

    SET_META("VMIN",    "%d", minv);
    SET_META("VLOW",    "%d", lowv);
    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
    SET_META("VHIGH",   "%d", highv);
    SET_META("VMAX",    "%d", maxv);

    SET_META("SATMIN",  "%d", minsat);
    SET_META("SATLOW",  "%d", lowsat);
    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
    SET_META("SATHIGH", "%d", highsat);
    SET_META("SATMAX",  "%d", maxsat);

    SET_META("HUEMED",  "%d", medhue);
    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);

    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);

    SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
    SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
    SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            char metaname[128];
            snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
            av_dict_set(&out->metadata, metaname, metabuf, 0);
        }
    }

    if (in != out)
        av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

static int filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;

    if (s->depth > 8)
        return filter_frame16(link, in);
    else
        return filter_frame8(link, in);
}

static const AVFilterPad signalstats_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad signalstats_outputs[] = {
    {
        .name         = "default",
        .config_props = config_props,
        .type         = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

AVFilter ff_vf_signalstats = {
    .name          = "signalstats",
    .description   = "Generate statistics from video analysis.",
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,
    .priv_size     = sizeof(SignalstatsContext),
    .inputs        = signalstats_inputs,
    .outputs       = signalstats_outputs,
    .priv_class    = &signalstats_class,
    .flags         = AVFILTER_FLAG_SLICE_THREADS,
};