FFmpeg
vf_signalstats.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
3  * Copyright (c) 2014 Clément Bœsch
4  * Copyright (c) 2014 Dave Rice @dericed
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include "libavutil/intreadwrite.h"
24 #include "libavutil/opt.h"
25 #include "libavutil/pixdesc.h"
26 #include "internal.h"
27 
28 enum FilterMode {
34 };
35 
36 typedef struct SignalstatsContext {
37  const AVClass *class;
38  int chromah; // height of chroma plane
39  int chromaw; // width of chroma plane
40  int hsub; // horizontal subsampling
41  int vsub; // vertical subsampling
42  int depth; // pixel depth
43  int fs; // pixel count per frame
44  int cfs; // pixel count per frame of chroma planes
45  int outfilter; // FilterMode
46  int filters;
48  uint8_t rgba_color[4];
49  int yuv_color[3];
50  int nb_jobs;
51  int *jobs_rets;
52 
53  int maxsize; // history stats array size
54  int *histy, *histu, *histv, *histsat;
55 
59 
60 typedef struct ThreadData {
61  const AVFrame *in;
62  AVFrame *out;
63 } ThreadData;
64 
65 typedef struct ThreadDataHueSatMetrics {
66  const AVFrame *src;
69 
70 #define OFFSET(x) offsetof(SignalstatsContext, x)
71 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
72 
/* Filter options.
 * "stat" is a bitmask selecting which analyses run; "out" selects at most
 * one analysis whose hit pixels are burned into the output frame using the
 * color given by "c"/"color" (converted to YUV in init()). */
static const AVOption signalstats_options[] = {
    {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "filters"},
        {"tout", "analyze pixels for temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, "filters"},
        {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, "filters"},
        {"brng", "analyze for pixels outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, "filters"},
    {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, "out"},
        {"tout", "highlight pixels that depict temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, "out"},
        {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, "out"},
        {"brng", "highlight pixels that are outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, "out"},
    {"c",     "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS}, // alias of "color"
    {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {NULL}
};
86 
87 AVFILTER_DEFINE_CLASS(signalstats);
88 
90 {
91  uint8_t r, g, b;
92  SignalstatsContext *s = ctx->priv;
93 
94  if (s->outfilter != FILTER_NONE)
95  s->filters |= 1 << s->outfilter;
96 
97  r = s->rgba_color[0];
98  g = s->rgba_color[1];
99  b = s->rgba_color[2];
100  s->yuv_color[0] = (( 66*r + 129*g + 25*b + (1<<7)) >> 8) + 16;
101  s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
102  s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
103  return 0;
104 }
105 
107 {
108  SignalstatsContext *s = ctx->priv;
109  av_frame_free(&s->frame_prev);
110  av_frame_free(&s->frame_sat);
111  av_frame_free(&s->frame_hue);
112  av_freep(&s->jobs_rets);
113  av_freep(&s->histy);
114  av_freep(&s->histu);
115  av_freep(&s->histv);
116  av_freep(&s->histsat);
117 }
118 
120 {
121  // TODO: add more
122  static const enum AVPixelFormat pix_fmts[] = {
135  };
136 
138 }
139 
140 static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
141 {
143  if (!frame)
144  return NULL;
145 
146  frame->format = pixfmt;
147  frame->width = w;
148  frame->height = h;
149 
150  if (av_frame_get_buffer(frame, 0) < 0) {
152  return NULL;
153  }
154 
155  return frame;
156 }
157 
158 static int config_output(AVFilterLink *outlink)
159 {
160  AVFilterContext *ctx = outlink->src;
161  SignalstatsContext *s = ctx->priv;
162  AVFilterLink *inlink = outlink->src->inputs[0];
164  s->hsub = desc->log2_chroma_w;
165  s->vsub = desc->log2_chroma_h;
166  s->depth = desc->comp[0].depth;
167  s->maxsize = 1 << s->depth;
168  s->histy = av_malloc_array(s->maxsize, sizeof(*s->histy));
169  s->histu = av_malloc_array(s->maxsize, sizeof(*s->histu));
170  s->histv = av_malloc_array(s->maxsize, sizeof(*s->histv));
171  s->histsat = av_malloc_array(s->maxsize, sizeof(*s->histsat));
172 
173  if (!s->histy || !s->histu || !s->histv || !s->histsat)
174  return AVERROR(ENOMEM);
175 
176  outlink->w = inlink->w;
177  outlink->h = inlink->h;
178 
179  s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
180  s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);
181 
182  s->fs = inlink->w * inlink->h;
183  s->cfs = s->chromaw * s->chromah;
184 
185  s->nb_jobs = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
186  s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
187  if (!s->jobs_rets)
188  return AVERROR(ENOMEM);
189 
190  s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
191  s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
192  if (!s->frame_sat || !s->frame_hue)
193  return AVERROR(ENOMEM);
194 
195  return 0;
196 }
197 
198 static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
199 {
200  const int chromax = x >> s->hsub;
201  const int chromay = y >> s->vsub;
202  f->data[0][y * f->linesize[0] + x] = s->yuv_color[0];
203  f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
204  f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
205 }
206 
207 static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
208 {
209  const int chromax = x >> s->hsub;
210  const int chromay = y >> s->vsub;
211  const int mult = 1 << (s->depth - 8);
212  AV_WN16(f->data[0] + y * f->linesize[0] + x * 2, s->yuv_color[0] * mult);
213  AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
214  AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
215 }
216 
217 static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
218 {
219  ThreadData *td = arg;
220  const SignalstatsContext *s = ctx->priv;
221  const AVFrame *in = td->in;
222  AVFrame *out = td->out;
223  const int w = in->width;
224  const int h = in->height;
225  const int slice_start = (h * jobnr ) / nb_jobs;
226  const int slice_end = (h * (jobnr+1)) / nb_jobs;
227  int x, y, score = 0;
228 
229  for (y = slice_start; y < slice_end; y++) {
230  const int yc = y >> s->vsub;
231  const uint8_t *pluma = &in->data[0][y * in->linesize[0]];
232  const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
233  const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];
234 
235  for (x = 0; x < w; x++) {
236  const int xc = x >> s->hsub;
237  const int luma = pluma[x];
238  const int chromau = pchromau[xc];
239  const int chromav = pchromav[xc];
240  const int filt = luma < 16 || luma > 235 ||
241  chromau < 16 || chromau > 240 ||
242  chromav < 16 || chromav > 240;
243  score += filt;
244  if (out && filt)
245  burn_frame8(s, out, x, y);
246  }
247  }
248  return score;
249 }
250 
251 static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
252 {
253  ThreadData *td = arg;
254  const SignalstatsContext *s = ctx->priv;
255  const AVFrame *in = td->in;
256  AVFrame *out = td->out;
257  const int mult = 1 << (s->depth - 8);
258  const int w = in->width;
259  const int h = in->height;
260  const int slice_start = (h * jobnr ) / nb_jobs;
261  const int slice_end = (h * (jobnr+1)) / nb_jobs;
262  int x, y, score = 0;
263 
264  for (y = slice_start; y < slice_end; y++) {
265  const int yc = y >> s->vsub;
266  const uint16_t *pluma = (uint16_t *)&in->data[0][y * in->linesize[0]];
267  const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
268  const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];
269 
270  for (x = 0; x < w; x++) {
271  const int xc = x >> s->hsub;
272  const int luma = pluma[x];
273  const int chromau = pchromau[xc];
274  const int chromav = pchromav[xc];
275  const int filt = luma < 16 * mult || luma > 235 * mult ||
276  chromau < 16 * mult || chromau > 240 * mult ||
277  chromav < 16 * mult || chromav > 240 * mult;
278  score += filt;
279  if (out && filt)
280  burn_frame16(s, out, x, y);
281  }
282  }
283  return score;
284 }
285 
/**
 * Temporal outlier test for one sample: y is an outlier when it deviates
 * from both vertical neighbours x and z while x and z agree with each other.
 *
 * Parameters are int rather than uint8_t: the 16-bit code path
 * (filter16_tout via the FILTER() macro) passes full-range uint16_t
 * samples, which uint8_t parameters would silently truncate.
 *
 * @return 1 if the middle sample is a temporal outlier, 0 otherwise
 */
static int filter_tout_outlier(int x, int y, int z)
{
    return ((abs(x - y) + abs(z - y)) / 2) - abs(z - x) > 4; // make 4 configurable?
}
290 
/**
 * Slice job: count (and optionally highlight) temporal outlier pixels in
 * the 8-bit luma plane — pixels that differ from the lines above and below
 * while those lines agree with each other. Returns the per-slice count.
 */
static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    int lw = in->linesize[0];
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        // first and last line have no vertical neighbours: nothing to compare
        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that video format is in fact interlaced.

/* Outlier test at horizontal offset i against the lines j rows above/below.
 * NOTE: these macros are also used by filter16_tout below — keep in sync. */
#define FILTER(i, j) \
    filter_tout_outlier(p[(y-j) * lw + x + i], \
                        p[ y    * lw + x + i], \
                        p[(y+j) * lw + x + i])

/* A pixel only counts when the test holds for it and both horizontal
 * neighbours, rejecting isolated single-pixel noise. */
#define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))

        if (y - 2 >= 0 && y + 2 < h) {
            // interior lines: require agreement at vertical distance 1 and 2
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        } else {
            // near the top/bottom edge only distance 1 is available
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        }
    }
    return score;
}
338 
/**
 * 16-bit variant of filter8_tout: counts temporal outlier pixels in the
 * luma plane, reusing the FILTER()/FILTER3() macros defined above (they
 * capture the local p, lw, x, y variables by name).
 *
 * NOTE(review): FILTER() feeds uint16_t samples into filter_tout_outlier();
 * verify its parameter types are wide enough for >8-bit depths.
 */
static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    int lw = in->linesize[0] / 2;  // linesize is in bytes; convert to samples
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        // first and last line have no vertical neighbours: nothing to compare
        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that video format is in fact interlaced.

        if (y - 2 >= 0 && y + 2 < h) {
            // interior lines: require agreement at vertical distance 1 and 2
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        } else {
            // near the top/bottom edge only distance 1 is available
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        }
    }
    return score;
}
379 
380 #define VREP_START 4
381 
382 static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
383 {
384  ThreadData *td = arg;
385  const SignalstatsContext *s = ctx->priv;
386  const AVFrame *in = td->in;
387  AVFrame *out = td->out;
388  const int w = in->width;
389  const int h = in->height;
390  const int slice_start = (h * jobnr ) / nb_jobs;
391  const int slice_end = (h * (jobnr+1)) / nb_jobs;
392  const uint8_t *p = in->data[0];
393  const int lw = in->linesize[0];
394  int x, y, score = 0;
395 
396  for (y = slice_start; y < slice_end; y++) {
397  const int y2lw = (y - VREP_START) * lw;
398  const int ylw = y * lw;
399  int filt, totdiff = 0;
400 
401  if (y < VREP_START)
402  continue;
403 
404  for (x = 0; x < w; x++)
405  totdiff += abs(p[y2lw + x] - p[ylw + x]);
406  filt = totdiff < w;
407 
408  score += filt;
409  if (filt && out)
410  for (x = 0; x < w; x++)
411  burn_frame8(s, out, x, y);
412  }
413  return score * w;
414 }
415 
416 static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
417 {
418  ThreadData *td = arg;
419  const SignalstatsContext *s = ctx->priv;
420  const AVFrame *in = td->in;
421  AVFrame *out = td->out;
422  const int w = in->width;
423  const int h = in->height;
424  const int slice_start = (h * jobnr ) / nb_jobs;
425  const int slice_end = (h * (jobnr+1)) / nb_jobs;
426  const uint16_t *p = (uint16_t *)in->data[0];
427  const int lw = in->linesize[0] / 2;
428  int x, y, score = 0;
429 
430  for (y = slice_start; y < slice_end; y++) {
431  const int y2lw = (y - VREP_START) * lw;
432  const int ylw = y * lw;
433  int64_t totdiff = 0;
434  int filt;
435 
436  if (y < VREP_START)
437  continue;
438 
439  for (x = 0; x < w; x++)
440  totdiff += abs(p[y2lw + x] - p[ylw + x]);
441  filt = totdiff < w;
442 
443  score += filt;
444  if (filt && out)
445  for (x = 0; x < w; x++)
446  burn_frame16(s, out, x, y);
447  }
448  return score * w;
449 }
450 
/* Dispatch table for the analysis filters: a human-readable name (used to
 * build the "lavfi.signalstats.<NAME>" metadata key) plus the 8-bit and
 * 16-bit slice-threaded implementations. Entry order must match the
 * FilterMode bit indices tested via s->filters. */
static const struct {
    const char *name;
    int (*process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
    int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} filters_def[] = {
    {"TOUT", filter8_tout, filter16_tout},  // temporal outliers
    {"VREP", filter8_vrep, filter16_vrep},  // vertical line repetition
    {"BRNG", filter8_brng, filter16_brng},  // out-of-broadcast-range pixels
    {NULL}
};
461 
462 static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
463 {
464  int i, j;
466  const SignalstatsContext *s = ctx->priv;
467  const AVFrame *src = td->src;
468  AVFrame *dst_sat = td->dst_sat;
469  AVFrame *dst_hue = td->dst_hue;
470 
471  const int slice_start = (s->chromah * jobnr ) / nb_jobs;
472  const int slice_end = (s->chromah * (jobnr+1)) / nb_jobs;
473 
474  const int lsz_u = src->linesize[1];
475  const int lsz_v = src->linesize[2];
476  const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
477  const uint8_t *p_v = src->data[2] + slice_start * lsz_v;
478 
479  const int lsz_sat = dst_sat->linesize[0];
480  const int lsz_hue = dst_hue->linesize[0];
481  uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
482  uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;
483 
484  for (j = slice_start; j < slice_end; j++) {
485  for (i = 0; i < s->chromaw; i++) {
486  const int yuvu = p_u[i];
487  const int yuvv = p_v[i];
488  p_sat[i] = hypotf(yuvu - 128, yuvv - 128); // int or round?
489  ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-128, yuvv-128) + 180.f), 360.f);
490  }
491  p_u += lsz_u;
492  p_v += lsz_v;
493  p_sat += lsz_sat;
494  p_hue += lsz_hue;
495  }
496 
497  return 0;
498 }
499 
500 static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
501 {
502  int i, j;
504  const SignalstatsContext *s = ctx->priv;
505  const AVFrame *src = td->src;
506  AVFrame *dst_sat = td->dst_sat;
507  AVFrame *dst_hue = td->dst_hue;
508  const int mid = 1 << (s->depth - 1);
509 
510  const int slice_start = (s->chromah * jobnr ) / nb_jobs;
511  const int slice_end = (s->chromah * (jobnr+1)) / nb_jobs;
512 
513  const int lsz_u = src->linesize[1] / 2;
514  const int lsz_v = src->linesize[2] / 2;
515  const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
516  const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;
517 
518  const int lsz_sat = dst_sat->linesize[0] / 2;
519  const int lsz_hue = dst_hue->linesize[0] / 2;
520  uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
521  uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;
522 
523  for (j = slice_start; j < slice_end; j++) {
524  for (i = 0; i < s->chromaw; i++) {
525  const int yuvu = p_u[i];
526  const int yuvv = p_v[i];
527  p_sat[i] = hypotf(yuvu - mid, yuvv - mid); // int or round?
528  ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180.f), 360.f);
529  }
530  p_u += lsz_u;
531  p_v += lsz_v;
532  p_sat += lsz_sat;
533  p_hue += lsz_hue;
534  }
535 
536  return 0;
537 }
538 
/* Effective bit depth of the sampled plane: popcount of the OR-accumulated
 * mask of every pixel value seen, i.e. the number of bit positions that
 * were actually exercised. */
static unsigned compute_bit_depth(uint16_t mask)
{
    return av_popcount(mask);
}
543 
545 {
546  AVFilterContext *ctx = link->dst;
547  SignalstatsContext *s = ctx->priv;
548  AVFilterLink *outlink = ctx->outputs[0];
549  AVFrame *out = in;
550  int i, j;
551  int w = 0, cw = 0, // in
552  pw = 0, cpw = 0; // prev
553  int fil;
554  char metabuf[128];
555  unsigned int *histy = s->histy,
556  *histu = s->histu,
557  *histv = s->histv,
558  histhue[360] = {0},
559  *histsat = s->histsat;
560  int miny = -1, minu = -1, minv = -1;
561  int maxy = -1, maxu = -1, maxv = -1;
562  int lowy = -1, lowu = -1, lowv = -1;
563  int highy = -1, highu = -1, highv = -1;
564  int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
565  int lowp, highp, clowp, chighp;
566  int accy, accu, accv;
567  int accsat, acchue = 0;
568  int medhue, maxhue;
569  int toty = 0, totu = 0, totv = 0, totsat=0;
570  int tothue = 0;
571  int dify = 0, difu = 0, difv = 0;
572  uint16_t masky = 0, masku = 0, maskv = 0;
573 
574  int filtot[FILT_NUMB] = {0};
575  AVFrame *prev;
576 
577  AVFrame *sat = s->frame_sat;
578  AVFrame *hue = s->frame_hue;
579  const uint8_t *p_sat = sat->data[0];
580  const uint8_t *p_hue = hue->data[0];
581  const int lsz_sat = sat->linesize[0];
582  const int lsz_hue = hue->linesize[0];
583  ThreadDataHueSatMetrics td_huesat = {
584  .src = in,
585  .dst_sat = sat,
586  .dst_hue = hue,
587  };
588 
589  if (!s->frame_prev)
590  s->frame_prev = av_frame_clone(in);
591 
592  prev = s->frame_prev;
593 
594  if (s->outfilter != FILTER_NONE) {
595  out = av_frame_clone(in);
597  }
598 
600  NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
601 
602  // Calculate luma histogram and difference with previous frame or field.
603  memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
604  for (j = 0; j < link->h; j++) {
605  for (i = 0; i < link->w; i++) {
606  const int yuv = in->data[0][w + i];
607 
608  masky |= yuv;
609  histy[yuv]++;
610  dify += abs(yuv - prev->data[0][pw + i]);
611  }
612  w += in->linesize[0];
613  pw += prev->linesize[0];
614  }
615 
616  // Calculate chroma histogram and difference with previous frame or field.
617  memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
618  memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
619  memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
620  for (j = 0; j < s->chromah; j++) {
621  for (i = 0; i < s->chromaw; i++) {
622  const int yuvu = in->data[1][cw+i];
623  const int yuvv = in->data[2][cw+i];
624 
625  masku |= yuvu;
626  maskv |= yuvv;
627  histu[yuvu]++;
628  difu += abs(yuvu - prev->data[1][cpw+i]);
629  histv[yuvv]++;
630  difv += abs(yuvv - prev->data[2][cpw+i]);
631 
632  histsat[p_sat[i]]++;
633  histhue[((int16_t*)p_hue)[i]]++;
634  }
635  cw += in->linesize[1];
636  cpw += prev->linesize[1];
637  p_sat += lsz_sat;
638  p_hue += lsz_hue;
639  }
640 
641  for (fil = 0; fil < FILT_NUMB; fil ++) {
642  if (s->filters & 1<<fil) {
643  ThreadData td = {
644  .in = in,
645  .out = out != in && s->outfilter == fil ? out : NULL,
646  };
647  memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
649  &td, s->jobs_rets, s->nb_jobs);
650  for (i = 0; i < s->nb_jobs; i++)
651  filtot[fil] += s->jobs_rets[i];
652  }
653  }
654 
655  // find low / high based on histogram percentile
656  // these only need to be calculated once.
657 
658  lowp = lrint(s->fs * 10 / 100.);
659  highp = lrint(s->fs * 90 / 100.);
660  clowp = lrint(s->cfs * 10 / 100.);
661  chighp = lrint(s->cfs * 90 / 100.);
662 
663  accy = accu = accv = accsat = 0;
664  for (fil = 0; fil < s->maxsize; fil++) {
665  if (miny < 0 && histy[fil]) miny = fil;
666  if (minu < 0 && histu[fil]) minu = fil;
667  if (minv < 0 && histv[fil]) minv = fil;
668  if (minsat < 0 && histsat[fil]) minsat = fil;
669 
670  if (histy[fil]) maxy = fil;
671  if (histu[fil]) maxu = fil;
672  if (histv[fil]) maxv = fil;
673  if (histsat[fil]) maxsat = fil;
674 
675  toty += histy[fil] * fil;
676  totu += histu[fil] * fil;
677  totv += histv[fil] * fil;
678  totsat += histsat[fil] * fil;
679 
680  accy += histy[fil];
681  accu += histu[fil];
682  accv += histv[fil];
683  accsat += histsat[fil];
684 
685  if (lowy == -1 && accy >= lowp) lowy = fil;
686  if (lowu == -1 && accu >= clowp) lowu = fil;
687  if (lowv == -1 && accv >= clowp) lowv = fil;
688  if (lowsat == -1 && accsat >= clowp) lowsat = fil;
689 
690  if (highy == -1 && accy >= highp) highy = fil;
691  if (highu == -1 && accu >= chighp) highu = fil;
692  if (highv == -1 && accv >= chighp) highv = fil;
693  if (highsat == -1 && accsat >= chighp) highsat = fil;
694  }
695 
696  maxhue = histhue[0];
697  medhue = -1;
698  for (fil = 0; fil < 360; fil++) {
699  tothue += histhue[fil] * fil;
700  acchue += histhue[fil];
701 
702  if (medhue == -1 && acchue > s->cfs / 2)
703  medhue = fil;
704  if (histhue[fil] > maxhue) {
705  maxhue = histhue[fil];
706  }
707  }
708 
709  av_frame_free(&s->frame_prev);
710  s->frame_prev = av_frame_clone(in);
711 
712 #define SET_META(key, fmt, val) do { \
713  snprintf(metabuf, sizeof(metabuf), fmt, val); \
714  av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0); \
715 } while (0)
716 
717  SET_META("YMIN", "%d", miny);
718  SET_META("YLOW", "%d", lowy);
719  SET_META("YAVG", "%g", 1.0 * toty / s->fs);
720  SET_META("YHIGH", "%d", highy);
721  SET_META("YMAX", "%d", maxy);
722 
723  SET_META("UMIN", "%d", minu);
724  SET_META("ULOW", "%d", lowu);
725  SET_META("UAVG", "%g", 1.0 * totu / s->cfs);
726  SET_META("UHIGH", "%d", highu);
727  SET_META("UMAX", "%d", maxu);
728 
729  SET_META("VMIN", "%d", minv);
730  SET_META("VLOW", "%d", lowv);
731  SET_META("VAVG", "%g", 1.0 * totv / s->cfs);
732  SET_META("VHIGH", "%d", highv);
733  SET_META("VMAX", "%d", maxv);
734 
735  SET_META("SATMIN", "%d", minsat);
736  SET_META("SATLOW", "%d", lowsat);
737  SET_META("SATAVG", "%g", 1.0 * totsat / s->cfs);
738  SET_META("SATHIGH", "%d", highsat);
739  SET_META("SATMAX", "%d", maxsat);
740 
741  SET_META("HUEMED", "%d", medhue);
742  SET_META("HUEAVG", "%g", 1.0 * tothue / s->cfs);
743 
744  SET_META("YDIF", "%g", 1.0 * dify / s->fs);
745  SET_META("UDIF", "%g", 1.0 * difu / s->cfs);
746  SET_META("VDIF", "%g", 1.0 * difv / s->cfs);
747 
748  SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
749  SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
750  SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));
751 
752  for (fil = 0; fil < FILT_NUMB; fil ++) {
753  if (s->filters & 1<<fil) {
754  char metaname[128];
755  snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
756  snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
757  av_dict_set(&out->metadata, metaname, metabuf, 0);
758  }
759  }
760 
761  if (in != out)
762  av_frame_free(&in);
763  return ff_filter_frame(outlink, out);
764 }
765 
767 {
768  AVFilterContext *ctx = link->dst;
769  SignalstatsContext *s = ctx->priv;
770  AVFilterLink *outlink = ctx->outputs[0];
771  AVFrame *out = in;
772  int i, j;
773  int w = 0, cw = 0, // in
774  pw = 0, cpw = 0; // prev
775  int fil;
776  char metabuf[128];
777  unsigned int *histy = s->histy,
778  *histu = s->histu,
779  *histv = s->histv,
780  histhue[360] = {0},
781  *histsat = s->histsat;
782  int miny = -1, minu = -1, minv = -1;
783  int maxy = -1, maxu = -1, maxv = -1;
784  int lowy = -1, lowu = -1, lowv = -1;
785  int highy = -1, highu = -1, highv = -1;
786  int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
787  int lowp, highp, clowp, chighp;
788  int accy, accu, accv;
789  int accsat, acchue = 0;
790  int medhue, maxhue;
791  int64_t toty = 0, totu = 0, totv = 0, totsat=0;
792  int64_t tothue = 0;
793  int64_t dify = 0, difu = 0, difv = 0;
794  uint16_t masky = 0, masku = 0, maskv = 0;
795 
796  int filtot[FILT_NUMB] = {0};
797  AVFrame *prev;
798 
799  AVFrame *sat = s->frame_sat;
800  AVFrame *hue = s->frame_hue;
801  const uint16_t *p_sat = (uint16_t *)sat->data[0];
802  const uint16_t *p_hue = (uint16_t *)hue->data[0];
803  const int lsz_sat = sat->linesize[0] / 2;
804  const int lsz_hue = hue->linesize[0] / 2;
805  ThreadDataHueSatMetrics td_huesat = {
806  .src = in,
807  .dst_sat = sat,
808  .dst_hue = hue,
809  };
810 
811  if (!s->frame_prev)
812  s->frame_prev = av_frame_clone(in);
813 
814  prev = s->frame_prev;
815 
816  if (s->outfilter != FILTER_NONE) {
817  out = av_frame_clone(in);
819  }
820 
822  NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
823 
824  // Calculate luma histogram and difference with previous frame or field.
825  memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
826  for (j = 0; j < link->h; j++) {
827  for (i = 0; i < link->w; i++) {
828  const int yuv = AV_RN16(in->data[0] + w + i * 2);
829 
830  masky |= yuv;
831  histy[yuv]++;
832  dify += abs(yuv - (int)AV_RN16(prev->data[0] + pw + i * 2));
833  }
834  w += in->linesize[0];
835  pw += prev->linesize[0];
836  }
837 
838  // Calculate chroma histogram and difference with previous frame or field.
839  memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
840  memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
841  memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
842  for (j = 0; j < s->chromah; j++) {
843  for (i = 0; i < s->chromaw; i++) {
844  const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
845  const int yuvv = AV_RN16(in->data[2] + cw + i * 2);
846 
847  masku |= yuvu;
848  maskv |= yuvv;
849  histu[yuvu]++;
850  difu += abs(yuvu - (int)AV_RN16(prev->data[1] + cpw + i * 2));
851  histv[yuvv]++;
852  difv += abs(yuvv - (int)AV_RN16(prev->data[2] + cpw + i * 2));
853 
854  histsat[p_sat[i]]++;
855  histhue[((int16_t*)p_hue)[i]]++;
856  }
857  cw += in->linesize[1];
858  cpw += prev->linesize[1];
859  p_sat += lsz_sat;
860  p_hue += lsz_hue;
861  }
862 
863  for (fil = 0; fil < FILT_NUMB; fil ++) {
864  if (s->filters & 1<<fil) {
865  ThreadData td = {
866  .in = in,
867  .out = out != in && s->outfilter == fil ? out : NULL,
868  };
869  memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
871  &td, s->jobs_rets, s->nb_jobs);
872  for (i = 0; i < s->nb_jobs; i++)
873  filtot[fil] += s->jobs_rets[i];
874  }
875  }
876 
877  // find low / high based on histogram percentile
878  // these only need to be calculated once.
879 
880  lowp = lrint(s->fs * 10 / 100.);
881  highp = lrint(s->fs * 90 / 100.);
882  clowp = lrint(s->cfs * 10 / 100.);
883  chighp = lrint(s->cfs * 90 / 100.);
884 
885  accy = accu = accv = accsat = 0;
886  for (fil = 0; fil < s->maxsize; fil++) {
887  if (miny < 0 && histy[fil]) miny = fil;
888  if (minu < 0 && histu[fil]) minu = fil;
889  if (minv < 0 && histv[fil]) minv = fil;
890  if (minsat < 0 && histsat[fil]) minsat = fil;
891 
892  if (histy[fil]) maxy = fil;
893  if (histu[fil]) maxu = fil;
894  if (histv[fil]) maxv = fil;
895  if (histsat[fil]) maxsat = fil;
896 
897  toty += histy[fil] * fil;
898  totu += histu[fil] * fil;
899  totv += histv[fil] * fil;
900  totsat += histsat[fil] * fil;
901 
902  accy += histy[fil];
903  accu += histu[fil];
904  accv += histv[fil];
905  accsat += histsat[fil];
906 
907  if (lowy == -1 && accy >= lowp) lowy = fil;
908  if (lowu == -1 && accu >= clowp) lowu = fil;
909  if (lowv == -1 && accv >= clowp) lowv = fil;
910  if (lowsat == -1 && accsat >= clowp) lowsat = fil;
911 
912  if (highy == -1 && accy >= highp) highy = fil;
913  if (highu == -1 && accu >= chighp) highu = fil;
914  if (highv == -1 && accv >= chighp) highv = fil;
915  if (highsat == -1 && accsat >= chighp) highsat = fil;
916  }
917 
918  maxhue = histhue[0];
919  medhue = -1;
920  for (fil = 0; fil < 360; fil++) {
921  tothue += histhue[fil] * fil;
922  acchue += histhue[fil];
923 
924  if (medhue == -1 && acchue > s->cfs / 2)
925  medhue = fil;
926  if (histhue[fil] > maxhue) {
927  maxhue = histhue[fil];
928  }
929  }
930 
931  av_frame_free(&s->frame_prev);
932  s->frame_prev = av_frame_clone(in);
933 
934  SET_META("YMIN", "%d", miny);
935  SET_META("YLOW", "%d", lowy);
936  SET_META("YAVG", "%g", 1.0 * toty / s->fs);
937  SET_META("YHIGH", "%d", highy);
938  SET_META("YMAX", "%d", maxy);
939 
940  SET_META("UMIN", "%d", minu);
941  SET_META("ULOW", "%d", lowu);
942  SET_META("UAVG", "%g", 1.0 * totu / s->cfs);
943  SET_META("UHIGH", "%d", highu);
944  SET_META("UMAX", "%d", maxu);
945 
946  SET_META("VMIN", "%d", minv);
947  SET_META("VLOW", "%d", lowv);
948  SET_META("VAVG", "%g", 1.0 * totv / s->cfs);
949  SET_META("VHIGH", "%d", highv);
950  SET_META("VMAX", "%d", maxv);
951 
952  SET_META("SATMIN", "%d", minsat);
953  SET_META("SATLOW", "%d", lowsat);
954  SET_META("SATAVG", "%g", 1.0 * totsat / s->cfs);
955  SET_META("SATHIGH", "%d", highsat);
956  SET_META("SATMAX", "%d", maxsat);
957 
958  SET_META("HUEMED", "%d", medhue);
959  SET_META("HUEAVG", "%g", 1.0 * tothue / s->cfs);
960 
961  SET_META("YDIF", "%g", 1.0 * dify / s->fs);
962  SET_META("UDIF", "%g", 1.0 * difu / s->cfs);
963  SET_META("VDIF", "%g", 1.0 * difv / s->cfs);
964 
965  SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
966  SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
967  SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));
968 
969  for (fil = 0; fil < FILT_NUMB; fil ++) {
970  if (s->filters & 1<<fil) {
971  char metaname[128];
972  snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
973  snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
974  av_dict_set(&out->metadata, metaname, metabuf, 0);
975  }
976  }
977 
978  if (in != out)
979  av_frame_free(&in);
980  return ff_filter_frame(outlink, out);
981 }
982 
984 {
985  AVFilterContext *ctx = link->dst;
986  SignalstatsContext *s = ctx->priv;
987 
988  if (s->depth > 8)
989  return filter_frame16(link, in);
990  else
991  return filter_frame8(link, in);
992 }
993 
994 static const AVFilterPad signalstats_inputs[] = {
995  {
996  .name = "default",
997  .type = AVMEDIA_TYPE_VIDEO,
998  .filter_frame = filter_frame,
999  },
1000 };
1001 
1003  {
1004  .name = "default",
1005  .config_props = config_output,
1006  .type = AVMEDIA_TYPE_VIDEO,
1007  },
1008 };
1009 
1011  .name = "signalstats",
1012  .description = "Generate statistics from video analysis.",
1013  .init = init,
1014  .uninit = uninit,
1015  .query_formats = query_formats,
1016  .priv_size = sizeof(SignalstatsContext),
1019  .priv_class = &signalstats_class,
1020  .flags = AVFILTER_FLAG_SLICE_THREADS,
1021 };
td
#define td
Definition: regdef.h:70
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
filter_frame8
static int filter_frame8(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:544
r
const char * r
Definition: vf_curves.c:116
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
compute_sat_hue_metrics16
static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:500
SignalstatsContext::vsub
int vsub
Definition: vf_signalstats.c:41
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:246
SignalstatsContext::rgba_color
uint8_t rgba_color[4]
Definition: vf_signalstats.c:48
SignalstatsContext::chromah
int chromah
Definition: vf_signalstats.c:38
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1019
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_signalstats.c:89
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2540
SET_META
#define SET_META(key, fmt, val)
floorf
static __device__ float floorf(float a)
Definition: cuda_runtime.h:172
filter8_brng
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:217
atan2f
#define atan2f(y, x)
Definition: libm.h:45
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
AV_RN16
#define AV_RN16(p)
Definition: intreadwrite.h:360
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:112
SignalstatsContext::fs
int fs
Definition: vf_signalstats.c:43
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:303
av_frame_make_writable
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
Definition: frame.c:490
pixdesc.h
AVFrame::width
int width
Definition: frame.h:361
SignalstatsContext::nb_jobs
int nb_jobs
Definition: vf_signalstats.c:50
w
uint8_t w
Definition: llviddspenc.c:38
AVOption
AVOption.
Definition: opt.h:247
b
#define b
Definition: input.c:40
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:389
VREP_START
#define VREP_START
Definition: vf_signalstats.c:380
SignalstatsContext::histv
int * histv
Definition: vf_signalstats.c:54
ThreadData::in
const AVFrame * in
Definition: vf_signalstats.c:61
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:99
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(signalstats)
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:153
av_popcount
#define av_popcount
Definition: common.h:150
ThreadData::out
AVFrame * out
Definition: af_adeclick.c:492
filter_frame16
static int filter_frame16(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:766
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:317
FLAGS
#define FLAGS
Definition: vf_signalstats.c:71
FilterMode
FilterMode
Definition: vp9.h:64
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:387
SignalstatsContext::histu
int * histu
Definition: vf_signalstats.c:54
filter16_tout
static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:339
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:373
filter_tout_outlier
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
Definition: vf_signalstats.c:286
FILT_NUMB
@ FILT_NUMB
Definition: vf_signalstats.c:33
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:50
filters
#define filters(fmt, inverse, clip, i, c)
Definition: af_crystalizer.c:239
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:99
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:392
AV_PIX_FMT_YUVJ411P
@ AV_PIX_FMT_YUVJ411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:248
SignalstatsContext
Definition: vf_signalstats.c:36
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_signalstats.c:106
mult
static int16_t mult(Float11 *f1, Float11 *f2)
Definition: g726.c:56
filters_def
static const struct @232 filters_def[]
lrint
#define lrint
Definition: tablegen.h:53
FILTER_VREP
@ FILTER_VREP
Definition: vf_signalstats.c:31
av_cold
#define av_cold
Definition: attributes.h:90
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:401
mask
static const uint16_t mask[17]
Definition: lzw.c:38
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:79
alloc_frame
static AVFrame * alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
Definition: vf_signalstats.c:140
SignalstatsContext::yuv_color
int yuv_color[3]
Definition: vf_signalstats.c:49
OFFSET
#define OFFSET(x)
Definition: vf_signalstats.c:70
SignalstatsContext::histsat
int * histsat
Definition: vf_signalstats.c:54
intreadwrite.h
s
#define s(width, name)
Definition: cbs_vp9.c:257
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:402
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:51
g
const char * g
Definition: vf_curves.c:117
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict)
Handle slice ends.
Definition: mpeg12dec.c:2037
ff_set_common_formats_from_list
int ff_set_common_formats_from_list(AVFilterContext *ctx, const int *fmts)
Equivalent to ff_set_common_formats(ctx, ff_make_format_list(fmts))
Definition: formats.c:703
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:290
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:386
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:400
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:424
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
f
#define f(width, name)
Definition: cbs_vp9.c:255
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:152
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:80
arg
const char * arg
Definition: jacosubdec.c:67
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
FILTER_BRNG
@ FILTER_BRNG
Definition: vf_signalstats.c:32
burn_frame16
static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:207
filter8_tout
static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:291
ThreadDataHueSatMetrics::dst_hue
AVFrame * dst_hue
Definition: vf_signalstats.c:67
SignalstatsContext::outfilter
int outfilter
Definition: vf_signalstats.c:45
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:78
AV_OPT_TYPE_COLOR
@ AV_OPT_TYPE_COLOR
Definition: opt.h:239
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:354
src
#define src
Definition: vp8dsp.c:255
AV_PIX_FMT_YUV440P10
#define AV_PIX_FMT_YUV440P10
Definition: pixfmt.h:391
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:390
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
ff_vf_signalstats
const AVFilter ff_vf_signalstats
Definition: vf_signalstats.c:1010
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
SignalstatsContext::hsub
int hsub
Definition: vf_signalstats.c:40
name
const char * name
Definition: vf_signalstats.c:452
signalstats_inputs
static const AVFilterPad signalstats_inputs[]
Definition: vf_signalstats.c:994
burn_frame8
static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:198
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:983
SignalstatsContext::histy
int * histy
Definition: vf_signalstats.c:54
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:394
FILTER3
#define FILTER3(j)
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:396
signalstats_outputs
static const AVFilterPad signalstats_outputs[]
Definition: vf_signalstats.c:1002
ThreadDataHueSatMetrics::dst_sat
AVFrame * dst_sat
Definition: vf_signalstats.c:67
ThreadDataHueSatMetrics
Definition: vf_signalstats.c:65
M_PI
#define M_PI
Definition: mathematics.h:52
internal.h
compute_sat_hue_metrics8
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:462
SignalstatsContext::filters
int filters
Definition: vf_signalstats.c:46
i
int i
Definition: input.c:406
process8
int(* process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:453
FILTER_TOUT
@ FILTER_TOUT
Definition: vf_signalstats.c:30
SignalstatsContext::frame_prev
AVFrame * frame_prev
Definition: vf_signalstats.c:47
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:32
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:804
SignalstatsContext::maxsize
int maxsize
Definition: vf_signalstats.c:53
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_signalstats.c:158
ThreadData
Used for passing data between threads.
Definition: dsddec.c:67
process16
int(* process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:454
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_PIX_FMT_YUVJ440P
@ AV_PIX_FMT_YUVJ440P
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
Definition: pixfmt.h:100
SignalstatsContext::depth
int depth
Definition: vf_signalstats.c:42
filt
static const int8_t filt[NUMTAPS *2]
Definition: af_earwax.c:39
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:56
SignalstatsContext::chromaw
int chromaw
Definition: vf_signalstats.c:39
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:388
AVFilter
Filter definition.
Definition: avfilter.h:149
pixfmt
enum AVPixelFormat pixfmt
Definition: kmsgrab.c:365
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
SignalstatsContext::cfs
int cfs
Definition: vf_signalstats.c:44
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:393
filter16_brng
static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:251
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:398
AVFrame::height
int height
Definition: frame.h:361
SignalstatsContext::frame_sat
AVFrame * frame_sat
Definition: vf_signalstats.c:56
FILTER_NONE
@ FILTER_NONE
Definition: vf_signalstats.c:29
signalstats_options
static const AVOption signalstats_options[]
Definition: vf_signalstats.c:73
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:224
filter8_vrep
static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:382
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_signalstats.c:119
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
AVFilterContext
An instance of a filter.
Definition: avfilter.h:346
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:121
desc
const char * desc
Definition: libsvtav1.c:79
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
filter16_vrep
static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:416
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:153
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:73
AV_OPT_TYPE_FLAGS
@ AV_OPT_TYPE_FLAGS
Definition: opt.h:223
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:334
SignalstatsContext::jobs_rets
int * jobs_rets
Definition: vf_signalstats.c:51
AV_PIX_FMT_YUV440P12
#define AV_PIX_FMT_YUV440P12
Definition: pixfmt.h:395
h
h
Definition: vp9dsp_template.c:2038
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:399
ff_filter_execute
static av_always_inline int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: internal.h:143
compute_bit_depth
static unsigned compute_bit_depth(uint16_t mask)
Definition: vf_signalstats.c:539
int
int
Definition: ffmpeg_filter.c:156
SignalstatsContext::frame_hue
AVFrame * frame_hue
Definition: vf_signalstats.c:57
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:233
snprintf
#define snprintf
Definition: snprintf.h:34
ThreadDataHueSatMetrics::src
const AVFrame * src
Definition: vf_signalstats.c:66
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:397
AV_WN16
#define AV_WN16(p, v)
Definition: intreadwrite.h:372