FFmpeg
vf_signalstats.c
/*
 * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
 * Copyright (c) 2014 Clément Bœsch
 * Copyright (c) 2014 Dave Rice @dericed
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "internal.h"

enum FilterMode {
    FILTER_NONE = -1,
    FILTER_TOUT,
    FILTER_VREP,
    FILTER_BRNG,
    FILT_NUMB
};

typedef struct SignalstatsContext {
    const AVClass *class;
    int chromah;    // height of chroma plane
    int chromaw;    // width of chroma plane
    int hsub;       // horizontal subsampling
    int vsub;       // vertical subsampling
    int depth;      // pixel depth
    int fs;         // pixel count per frame
    int cfs;        // pixel count per frame of chroma planes
    int outfilter;  // FilterMode
    int filters;
    AVFrame *frame_prev;
    uint8_t rgba_color[4];
    int yuv_color[3];
    int nb_jobs;
    int *jobs_rets;

    int maxsize;    // history stats array size
    int *histy, *histu, *histv, *histsat;

    AVFrame *frame_sat;
    AVFrame *frame_hue;
} SignalstatsContext;

typedef struct ThreadData {
    const AVFrame *in;
    AVFrame *out;
} ThreadData;

typedef struct ThreadDataHueSatMetrics {
    const AVFrame *src;
    AVFrame *dst_sat, *dst_hue;
} ThreadDataHueSatMetrics;

#define OFFSET(x) offsetof(SignalstatsContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption signalstats_options[] = {
    {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "filters"},
    {"tout", "analyze pixels for temporal outliers",             0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, "filters"},
    {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, "filters"},
    {"brng", "analyze for pixels outside of broadcast range",    0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, "filters"},
    {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, "out"},
    {"tout", "highlight pixels that depict temporal outliers",             0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, "out"},
    {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, "out"},
    {"brng", "highlight pixels that are outside of broadcast range",       0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, "out"},
    {"c",     "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {NULL}
};

AVFILTER_DEFINE_CLASS(signalstats);

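/*
 * Illustrative usage (not part of the original file): run all three
 * analyzers, burn the broadcast-range violations into the output in red,
 * and print the per-frame measurements:
 *
 *   ffmpeg -i input.mov \
 *          -vf "signalstats=stat=tout+vrep+brng:out=brng:color=red,metadata=print" \
 *          -f null -
 */
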
static av_cold int init(AVFilterContext *ctx)
{
    uint8_t r, g, b;
    SignalstatsContext *s = ctx->priv;

    if (s->outfilter != FILTER_NONE)
        s->filters |= 1 << s->outfilter;

    r = s->rgba_color[0];
    g = s->rgba_color[1];
    b = s->rgba_color[2];
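    /* Convert the configured RGB highlight color to limited-range 8-bit
     * YCbCr using the common BT.601 integer approximation (coefficients
     * scaled by 256, +(1<<7) for rounding); burn_frame16() rescales the
     * result for higher bit depths. */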
    s->yuv_color[0] = (( 66*r + 129*g +  25*b + (1<<7)) >> 8) +  16;
    s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
    s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    SignalstatsContext *s = ctx->priv;
    av_frame_free(&s->frame_prev);
    av_frame_free(&s->frame_sat);
    av_frame_free(&s->frame_hue);
    av_freep(&s->jobs_rets);
    av_freep(&s->histy);
    av_freep(&s->histu);
    av_freep(&s->histv);
    av_freep(&s->histsat);
}

static int query_formats(AVFilterContext *ctx)
{
    // TODO: add more
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_YUV440P,  AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P,
        AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_YUVJ411P,
        AV_PIX_FMT_YUV444P9,  AV_PIX_FMT_YUV422P9,  AV_PIX_FMT_YUV420P9,
        AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV420P10,
        AV_PIX_FMT_YUV440P10,
        AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV420P12,
        AV_PIX_FMT_YUV440P12,
        AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV420P14,
        AV_PIX_FMT_YUV444P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
{
    AVFrame *frame = av_frame_alloc();
    if (!frame)
        return NULL;

    frame->format = pixfmt;
    frame->width  = w;
    frame->height = h;

    if (av_frame_get_buffer(frame, 0) < 0) {
        av_frame_free(&frame);
        return NULL;
    }

    return frame;
}

static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *inlink = outlink->src->inputs[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);

    s->hsub = desc->log2_chroma_w;
    s->vsub = desc->log2_chroma_h;
    s->depth = desc->comp[0].depth;
    s->maxsize = 1 << s->depth;
    s->histy = av_malloc_array(s->maxsize, sizeof(*s->histy));
    s->histu = av_malloc_array(s->maxsize, sizeof(*s->histu));
    s->histv = av_malloc_array(s->maxsize, sizeof(*s->histv));
    s->histsat = av_malloc_array(s->maxsize, sizeof(*s->histsat));

    if (!s->histy || !s->histu || !s->histv || !s->histsat)
        return AVERROR(ENOMEM);

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
    s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);

    s->fs = inlink->w * inlink->h;
    s->cfs = s->chromaw * s->chromah;

    s->nb_jobs = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
    s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
    if (!s->jobs_rets)
        return AVERROR(ENOMEM);

    s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
    s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
    if (!s->frame_sat || !s->frame_hue)
        return AVERROR(ENOMEM);

    return 0;
}

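/* Paint one flagged pixel with the configured highlight color; the chroma
 * sample shared by that pixel is located by shifting the luma coordinates
 * right by the subsampling factors. */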
static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
{
    const int chromax = x >> s->hsub;
    const int chromay = y >> s->vsub;
    f->data[0][y       * f->linesize[0] +       x] = s->yuv_color[0];
    f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
    f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
}

static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
{
    const int chromax = x >> s->hsub;
    const int chromay = y >> s->vsub;
    const int mult = 1 << (s->depth - 8);
    AV_WN16(f->data[0] + y       * f->linesize[0] +       x * 2, s->yuv_color[0] * mult);
    AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
    AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
}

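/* BRNG: count (and optionally highlight) samples outside the broadcast
 * ("legal") range, i.e. luma outside [16, 235] or chroma outside [16, 240]
 * at 8 bits; the 16-bit variant scales the bounds by 1 << (depth - 8). */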
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int yc = y >> s->vsub;
        const uint8_t *pluma    = &in->data[0][y  * in->linesize[0]];
        const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
        const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];

        for (x = 0; x < w; x++) {
            const int xc = x >> s->hsub;
            const int luma    = pluma[x];
            const int chromau = pchromau[xc];
            const int chromav = pchromav[xc];
            const int filt = luma    < 16 || luma    > 235 ||
                             chromau < 16 || chromau > 240 ||
                             chromav < 16 || chromav > 240;
            score += filt;
            if (out && filt)
                burn_frame8(s, out, x, y);
        }
    }
    return score;
}

static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int mult = 1 << (s->depth - 8);
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int yc = y >> s->vsub;
        const uint16_t *pluma    = (uint16_t *)&in->data[0][y  * in->linesize[0]];
        const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
        const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];

        for (x = 0; x < w; x++) {
            const int xc = x >> s->hsub;
            const int luma    = pluma[x];
            const int chromau = pchromau[xc];
            const int chromav = pchromav[xc];
            const int filt = luma    < 16 * mult || luma    > 235 * mult ||
                             chromau < 16 * mult || chromau > 240 * mult ||
                             chromav < 16 * mult || chromav > 240 * mult;
            score += filt;
            if (out && filt)
                burn_frame16(s, out, x, y);
        }
    }
    return score;
}

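/* A sample y is a temporal-outlier candidate when it differs from both of
 * its vertical neighbours x and z while x and z agree with each other:
 * the mean of |x - y| and |z - y| must exceed |z - x| by more than 4. */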
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
{
    return ((abs(x - y) + abs(z - y)) / 2) - abs(z - x) > 4; // make 4 configurable?
}

static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    int lw = in->linesize[0];
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // also compare against the pixels two rows above and below
        // (to eliminate interlace artefacts)
        // should check that the video format is in fact interlaced.

#define FILTER(i, j) \
        filter_tout_outlier(p[(y-j) * lw + x + i], \
                            p[ y    * lw + x + i], \
                            p[(y+j) * lw + x + i])

#define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))

        if (y - 2 >= 0 && y + 2 < h) {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        } else {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        }
    }
    return score;
}

static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    int lw = in->linesize[0] / 2;
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // also compare against the pixels two rows above and below
        // (to eliminate interlace artefacts)
        // should check that the video format is in fact interlaced.

        if (y - 2 >= 0 && y + 2 < h) {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        } else {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        }
    }
    return score;
}

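/* VREP: a line counts as a vertical repetition when it is nearly identical
 * to the line VREP_START rows above it, i.e. the summed absolute difference
 * across the row stays below one code value per pixel on average. */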
#define VREP_START 4

static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    const int lw = in->linesize[0];
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int y2lw = (y - VREP_START) * lw;
        const int ylw  = y * lw;
        int filt, totdiff = 0;

        if (y < VREP_START)
            continue;

        for (x = 0; x < w; x++)
            totdiff += abs(p[y2lw + x] - p[ylw + x]);
        filt = totdiff < w;

        score += filt;
        if (filt && out)
            for (x = 0; x < w; x++)
                burn_frame8(s, out, x, y);
    }
    return score * w;
}

static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    const int lw = in->linesize[0] / 2;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int y2lw = (y - VREP_START) * lw;
        const int ylw  = y * lw;
        int64_t totdiff = 0;
        int filt;

        if (y < VREP_START)
            continue;

        for (x = 0; x < w; x++)
            totdiff += abs(p[y2lw + x] - p[ylw + x]);
        filt = totdiff < w;

        score += filt;
        if (filt && out)
            for (x = 0; x < w; x++)
                burn_frame16(s, out, x, y);
    }
    return score * w;
}

static const struct {
    const char *name;
    int (*process8) (AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
    int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} filters_def[] = {
    {"TOUT", filter8_tout, filter16_tout},
    {"VREP", filter8_vrep, filter16_vrep},
    {"BRNG", filter8_brng, filter16_brng},
    {NULL}
};

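/* Per chroma sample, saturation is the distance of (U, V) from the neutral
 * point and hue is the angle of that vector mapped to [0, 360) degrees;
 * both are precomputed into scratch planes so the histogram pass can
 * simply index them. */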
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    ThreadDataHueSatMetrics *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1];
    const int lsz_v = src->linesize[2];
    const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
    const uint8_t *p_v = src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0];
    const int lsz_hue = dst_hue->linesize[0];
    uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
    uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypot(yuvu - 128, yuvv - 128); // int or round?
            ((int16_t*)p_hue)[i] = fmod(floor((180 / M_PI) * atan2f(yuvu-128, yuvv-128) + 180), 360.);
        }
        p_u   += lsz_u;
        p_v   += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}

static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    ThreadDataHueSatMetrics *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;
    const int mid = 1 << (s->depth - 1);

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1] / 2;
    const int lsz_v = src->linesize[2] / 2;
    const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
    const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0] / 2;
    const int lsz_hue = dst_hue->linesize[0] / 2;
    uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
    uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypot(yuvu - mid, yuvv - mid); // int or round?
            ((int16_t*)p_hue)[i] = fmod(floor((180 / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180), 360.);
        }
        p_u   += lsz_u;
        p_v   += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}

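/* Effective bit depth is estimated by OR-ing together every sample value
 * seen in a plane and popcounting the result: bit positions that are never
 * set (e.g. 8-bit content carried in a 10-bit format) contribute nothing. */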
static unsigned compute_bit_depth(uint16_t mask)
{
    return av_popcount(mask);
}

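/* Per-frame analysis driver (8-bit variant): fills the saturation/hue
 * scratch planes in slice threads, accumulates luma/chroma/sat/hue
 * histograms and frame-to-frame difference sums, runs whichever
 * TOUT/VREP/BRNG analyzers are enabled, and exports every statistic as
 * frame metadata. filter_frame16() below is the same logic for depth > 8. */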
static int filter_frame8(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;
    int i, j;
    int w = 0,  cw = 0,  // in
        pw = 0, cpw = 0; // prev
    int fil;
    char metabuf[128];
    unsigned int *histy = s->histy,
                 *histu = s->histu,
                 *histv = s->histv,
                 histhue[360] = {0},
                 *histsat = s->histsat;
    int miny  = -1, minu  = -1, minv  = -1;
    int maxy  = -1, maxu  = -1, maxv  = -1;
    int lowy  = -1, lowu  = -1, lowv  = -1;
    int highy = -1, highu = -1, highv = -1;
    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
    int lowp, highp, clowp, chighp;
    int accy, accu, accv;
    int accsat, acchue = 0;
    int medhue, maxhue;
    int toty = 0, totu = 0, totv = 0, totsat = 0;
    int tothue = 0;
    int dify = 0, difu = 0, difv = 0;
    uint16_t masky = 0, masku = 0, maskv = 0;

    int filtot[FILT_NUMB] = {0};
    AVFrame *prev;

    AVFrame *sat = s->frame_sat;
    AVFrame *hue = s->frame_hue;
    const uint8_t *p_sat = sat->data[0];
    const uint8_t *p_hue = hue->data[0];
    const int lsz_sat = sat->linesize[0];
    const int lsz_hue = hue->linesize[0];
    ThreadDataHueSatMetrics td_huesat = {
        .src     = in,
        .dst_sat = sat,
        .dst_hue = hue,
    };

    if (!s->frame_prev)
        s->frame_prev = av_frame_clone(in);

    prev = s->frame_prev;

    if (s->outfilter != FILTER_NONE) {
        out = av_frame_clone(in);
        av_frame_make_writable(out);
    }

    ctx->internal->execute(ctx, compute_sat_hue_metrics8, &td_huesat,
                           NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));

    // Calculate luma histogram and difference with previous frame or field.
    memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
    for (j = 0; j < link->h; j++) {
        for (i = 0; i < link->w; i++) {
            const int yuv = in->data[0][w + i];

            masky |= yuv;
            histy[yuv]++;
            dify += abs(yuv - prev->data[0][pw + i]);
        }
        w  += in->linesize[0];
        pw += prev->linesize[0];
    }

    // Calculate chroma histogram and difference with previous frame or field.
    memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
    memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
    memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
    for (j = 0; j < s->chromah; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = in->data[1][cw+i];
            const int yuvv = in->data[2][cw+i];

            masku |= yuvu;
            maskv |= yuvv;
            histu[yuvu]++;
            difu += abs(yuvu - prev->data[1][cpw+i]);
            histv[yuvv]++;
            difv += abs(yuvv - prev->data[2][cpw+i]);

            histsat[p_sat[i]]++;
            histhue[((int16_t*)p_hue)[i]]++;
        }
        cw  += in->linesize[1];
        cpw += prev->linesize[1];
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    for (fil = 0; fil < FILT_NUMB; fil++) {
        if (s->filters & 1<<fil) {
            ThreadData td = {
                .in  = in,
                .out = out != in && s->outfilter == fil ? out : NULL,
            };
            memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
            ctx->internal->execute(ctx, filters_def[fil].process8,
                                   &td, s->jobs_rets, s->nb_jobs);
            for (i = 0; i < s->nb_jobs; i++)
                filtot[fil] += s->jobs_rets[i];
        }
    }

    // find low / high based on histogram percentile
    // these only need to be calculated once.

    lowp   = lrint(s->fs  * 10 / 100.);
    highp  = lrint(s->fs  * 90 / 100.);
    clowp  = lrint(s->cfs * 10 / 100.);
    chighp = lrint(s->cfs * 90 / 100.);

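    /* Example: for 1920x1080 content, fs = 2073600, so lowp = 207360 and
     * highp = 1866240; YLOW/YHIGH become the smallest sample values whose
     * cumulative histogram counts reach the 10th/90th percentiles. */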
    accy = accu = accv = accsat = 0;
    for (fil = 0; fil < s->maxsize; fil++) {
        if (miny   < 0 && histy[fil])   miny   = fil;
        if (minu   < 0 && histu[fil])   minu   = fil;
        if (minv   < 0 && histv[fil])   minv   = fil;
        if (minsat < 0 && histsat[fil]) minsat = fil;

        if (histy[fil])   maxy   = fil;
        if (histu[fil])   maxu   = fil;
        if (histv[fil])   maxv   = fil;
        if (histsat[fil]) maxsat = fil;

        toty   += histy[fil]   * fil;
        totu   += histu[fil]   * fil;
        totv   += histv[fil]   * fil;
        totsat += histsat[fil] * fil;

        accy   += histy[fil];
        accu   += histu[fil];
        accv   += histv[fil];
        accsat += histsat[fil];

        if (lowy   == -1 && accy   >= lowp)  lowy   = fil;
        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
        if (lowsat == -1 && accsat >= clowp) lowsat = fil;

        if (highy   == -1 && accy   >= highp)  highy   = fil;
        if (highu   == -1 && accu   >= chighp) highu   = fil;
        if (highv   == -1 && accv   >= chighp) highv   = fil;
        if (highsat == -1 && accsat >= chighp) highsat = fil;
    }

    maxhue = histhue[0];
    medhue = -1;
    for (fil = 0; fil < 360; fil++) {
        tothue += histhue[fil] * fil;
        acchue += histhue[fil];

        if (medhue == -1 && acchue > s->cfs / 2)
            medhue = fil;
        if (histhue[fil] > maxhue) {
            maxhue = histhue[fil];
        }
    }

    av_frame_free(&s->frame_prev);
    s->frame_prev = av_frame_clone(in);

#define SET_META(key, fmt, val) do {                                   \
    snprintf(metabuf, sizeof(metabuf), fmt, val);                      \
    av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0); \
} while (0)

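    /* Every measurement is exported as a string entry in the frame's
     * metadata dictionary, e.g. lavfi.signalstats.YAVG or
     * lavfi.signalstats.SATMAX, where downstream filters (metadata=print)
     * or ffprobe can read it. */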
    SET_META("YMIN",    "%d", miny);
    SET_META("YLOW",    "%d", lowy);
    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
    SET_META("YHIGH",   "%d", highy);
    SET_META("YMAX",    "%d", maxy);

    SET_META("UMIN",    "%d", minu);
    SET_META("ULOW",    "%d", lowu);
    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
    SET_META("UHIGH",   "%d", highu);
    SET_META("UMAX",    "%d", maxu);

    SET_META("VMIN",    "%d", minv);
    SET_META("VLOW",    "%d", lowv);
    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
    SET_META("VHIGH",   "%d", highv);
    SET_META("VMAX",    "%d", maxv);

    SET_META("SATMIN",  "%d", minsat);
    SET_META("SATLOW",  "%d", lowsat);
    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
    SET_META("SATHIGH", "%d", highsat);
    SET_META("SATMAX",  "%d", maxsat);

    SET_META("HUEMED",  "%d", medhue);
    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);

    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);

    SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
    SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
    SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));

    for (fil = 0; fil < FILT_NUMB; fil++) {
        if (s->filters & 1<<fil) {
            char metaname[128];
            snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
            av_dict_set(&out->metadata, metaname, metabuf, 0);
        }
    }

    if (in != out)
        av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

static int filter_frame16(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;
    int i, j;
    int w = 0,  cw = 0,  // in
        pw = 0, cpw = 0; // prev
    int fil;
    char metabuf[128];
    unsigned int *histy = s->histy,
                 *histu = s->histu,
                 *histv = s->histv,
                 histhue[360] = {0},
                 *histsat = s->histsat;
    int miny  = -1, minu  = -1, minv  = -1;
    int maxy  = -1, maxu  = -1, maxv  = -1;
    int lowy  = -1, lowu  = -1, lowv  = -1;
    int highy = -1, highu = -1, highv = -1;
    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
    int lowp, highp, clowp, chighp;
    int accy, accu, accv;
    int accsat, acchue = 0;
    int medhue, maxhue;
    int64_t toty = 0, totu = 0, totv = 0, totsat = 0;
    int64_t tothue = 0;
    int64_t dify = 0, difu = 0, difv = 0;
    uint16_t masky = 0, masku = 0, maskv = 0;

    int filtot[FILT_NUMB] = {0};
    AVFrame *prev;

    AVFrame *sat = s->frame_sat;
    AVFrame *hue = s->frame_hue;
    const uint16_t *p_sat = (uint16_t *)sat->data[0];
    const uint16_t *p_hue = (uint16_t *)hue->data[0];
    const int lsz_sat = sat->linesize[0] / 2;
    const int lsz_hue = hue->linesize[0] / 2;
    ThreadDataHueSatMetrics td_huesat = {
        .src     = in,
        .dst_sat = sat,
        .dst_hue = hue,
    };

    if (!s->frame_prev)
        s->frame_prev = av_frame_clone(in);

    prev = s->frame_prev;

    if (s->outfilter != FILTER_NONE) {
        out = av_frame_clone(in);
        av_frame_make_writable(out);
    }

    ctx->internal->execute(ctx, compute_sat_hue_metrics16, &td_huesat,
                           NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));

    // Calculate luma histogram and difference with previous frame or field.
    memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
    for (j = 0; j < link->h; j++) {
        for (i = 0; i < link->w; i++) {
            const int yuv = AV_RN16(in->data[0] + w + i * 2);

            masky |= yuv;
            histy[yuv]++;
            dify += abs(yuv - (int)AV_RN16(prev->data[0] + pw + i * 2));
        }
        w  += in->linesize[0];
        pw += prev->linesize[0];
    }

    // Calculate chroma histogram and difference with previous frame or field.
    memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
    memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
    memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
    for (j = 0; j < s->chromah; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
            const int yuvv = AV_RN16(in->data[2] + cw + i * 2);

            masku |= yuvu;
            maskv |= yuvv;
            histu[yuvu]++;
            difu += abs(yuvu - (int)AV_RN16(prev->data[1] + cpw + i * 2));
            histv[yuvv]++;
            difv += abs(yuvv - (int)AV_RN16(prev->data[2] + cpw + i * 2));

            histsat[p_sat[i]]++;
            histhue[((int16_t*)p_hue)[i]]++;
        }
        cw  += in->linesize[1];
        cpw += prev->linesize[1];
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    for (fil = 0; fil < FILT_NUMB; fil++) {
        if (s->filters & 1<<fil) {
            ThreadData td = {
                .in  = in,
                .out = out != in && s->outfilter == fil ? out : NULL,
            };
            memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
            ctx->internal->execute(ctx, filters_def[fil].process16,
                                   &td, s->jobs_rets, s->nb_jobs);
            for (i = 0; i < s->nb_jobs; i++)
                filtot[fil] += s->jobs_rets[i];
        }
    }

    // find low / high based on histogram percentile
    // these only need to be calculated once.

    lowp   = lrint(s->fs  * 10 / 100.);
    highp  = lrint(s->fs  * 90 / 100.);
    clowp  = lrint(s->cfs * 10 / 100.);
    chighp = lrint(s->cfs * 90 / 100.);

    accy = accu = accv = accsat = 0;
    for (fil = 0; fil < s->maxsize; fil++) {
        if (miny   < 0 && histy[fil])   miny   = fil;
        if (minu   < 0 && histu[fil])   minu   = fil;
        if (minv   < 0 && histv[fil])   minv   = fil;
        if (minsat < 0 && histsat[fil]) minsat = fil;

        if (histy[fil])   maxy   = fil;
        if (histu[fil])   maxu   = fil;
        if (histv[fil])   maxv   = fil;
        if (histsat[fil]) maxsat = fil;

        toty   += histy[fil]   * fil;
        totu   += histu[fil]   * fil;
        totv   += histv[fil]   * fil;
        totsat += histsat[fil] * fil;

        accy   += histy[fil];
        accu   += histu[fil];
        accv   += histv[fil];
        accsat += histsat[fil];

        if (lowy   == -1 && accy   >= lowp)  lowy   = fil;
        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
        if (lowsat == -1 && accsat >= clowp) lowsat = fil;

        if (highy   == -1 && accy   >= highp)  highy   = fil;
        if (highu   == -1 && accu   >= chighp) highu   = fil;
        if (highv   == -1 && accv   >= chighp) highv   = fil;
        if (highsat == -1 && accsat >= chighp) highsat = fil;
    }

    maxhue = histhue[0];
    medhue = -1;
    for (fil = 0; fil < 360; fil++) {
        tothue += histhue[fil] * fil;
        acchue += histhue[fil];

        if (medhue == -1 && acchue > s->cfs / 2)
            medhue = fil;
        if (histhue[fil] > maxhue) {
            maxhue = histhue[fil];
        }
    }

    av_frame_free(&s->frame_prev);
    s->frame_prev = av_frame_clone(in);

    SET_META("YMIN",    "%d", miny);
    SET_META("YLOW",    "%d", lowy);
    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
    SET_META("YHIGH",   "%d", highy);
    SET_META("YMAX",    "%d", maxy);

    SET_META("UMIN",    "%d", minu);
    SET_META("ULOW",    "%d", lowu);
    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
    SET_META("UHIGH",   "%d", highu);
    SET_META("UMAX",    "%d", maxu);

    SET_META("VMIN",    "%d", minv);
    SET_META("VLOW",    "%d", lowv);
    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
    SET_META("VHIGH",   "%d", highv);
    SET_META("VMAX",    "%d", maxv);

    SET_META("SATMIN",  "%d", minsat);
    SET_META("SATLOW",  "%d", lowsat);
    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
    SET_META("SATHIGH", "%d", highsat);
    SET_META("SATMAX",  "%d", maxsat);

    SET_META("HUEMED",  "%d", medhue);
    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);

    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);

    SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
    SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
    SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));

    for (fil = 0; fil < FILT_NUMB; fil++) {
        if (s->filters & 1<<fil) {
            char metaname[128];
            snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
            av_dict_set(&out->metadata, metaname, metabuf, 0);
        }
    }

    if (in != out)
        av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

static int filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;

    if (s->depth > 8)
        return filter_frame16(link, in);
    else
        return filter_frame8(link, in);
}

static const AVFilterPad signalstats_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad signalstats_outputs[] = {
    {
        .name         = "default",
        .config_props = config_output,
        .type         = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

AVFilter ff_vf_signalstats = {
    .name          = "signalstats",
    .description   = "Generate statistics from video analysis.",
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,
    .priv_size     = sizeof(SignalstatsContext),
    .inputs        = signalstats_inputs,
    .outputs       = signalstats_outputs,
    .priv_class    = &signalstats_class,
    .flags         = AVFILTER_FLAG_SLICE_THREADS,
};