FFmpeg
vf_signalstats.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
3  * Copyright (c) 2014 Clément Bœsch
4  * Copyright (c) 2014 Dave Rice @dericed
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include "libavutil/intreadwrite.h"
24 #include "libavutil/opt.h"
25 #include "libavutil/pixdesc.h"
26 #include "filters.h"
27 #include "internal.h"
28 
29 enum FilterMode {
35 };
36 
37 typedef struct SignalstatsContext {
38  const AVClass *class;
39  int chromah; // height of chroma plane
40  int chromaw; // width of chroma plane
41  int hsub; // horizontal subsampling
42  int vsub; // vertical subsampling
43  int depth; // pixel depth
44  int fs; // pixel count per frame
45  int cfs; // pixel count per frame of chroma planes
46  int outfilter; // FilterMode
47  int filters;
49  uint8_t rgba_color[4];
50  int yuv_color[3];
51  int nb_jobs;
52  int *jobs_rets;
53 
54  int maxsize; // history stats array size
55  int *histy, *histu, *histv, *histsat;
56 
60 
61 typedef struct ThreadData {
62  const AVFrame *in;
63  AVFrame *out;
64 } ThreadData;
65 
66 typedef struct ThreadDataHueSatMetrics {
67  const AVFrame *src;
70 
71 #define OFFSET(x) offsetof(SignalstatsContext, x)
72 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
73 
/* User options:
 *  - "stat":  bitmask selecting which analyses run each frame (tout/vrep/brng)
 *  - "out":   one analysis whose matching pixels are highlighted in the output
 *  - "c"/"color": RGBA highlight color, converted to YUV in init()
 * NOTE(review): the "filters" constants are bit positions (1<<FILTER_x) while
 * the "out" constants are plain enum values — intentional, do not unify. */
static const AVOption signalstats_options[] = {
    {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, .unit = "filters"},
    {"tout", "analyze pixels for temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, .unit = "filters"},
    {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, .unit = "filters"},
    {"brng", "analyze for pixels outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, .unit = "filters"},
    {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, .unit = "out"},
    {"tout", "highlight pixels that depict temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, .unit = "out"},
    {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, .unit = "out"},
    {"brng", "highlight pixels that are outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, .unit = "out"},
    {"c", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {NULL}
};
87 
88 AVFILTER_DEFINE_CLASS(signalstats);
89 
91 {
92  uint8_t r, g, b;
93  SignalstatsContext *s = ctx->priv;
94 
95  if (s->outfilter != FILTER_NONE)
96  s->filters |= 1 << s->outfilter;
97 
98  r = s->rgba_color[0];
99  g = s->rgba_color[1];
100  b = s->rgba_color[2];
101  s->yuv_color[0] = (( 66*r + 129*g + 25*b + (1<<7)) >> 8) + 16;
102  s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
103  s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
104  return 0;
105 }
106 
108 {
109  SignalstatsContext *s = ctx->priv;
110  av_frame_free(&s->frame_prev);
111  av_frame_free(&s->frame_sat);
112  av_frame_free(&s->frame_hue);
113  av_freep(&s->jobs_rets);
114  av_freep(&s->histy);
115  av_freep(&s->histu);
116  av_freep(&s->histv);
117  av_freep(&s->histsat);
118 }
119 
120 // TODO: add more
121 static const enum AVPixelFormat pix_fmts[] = {
134 };
135 
136 static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
137 {
139  if (!frame)
140  return NULL;
141 
142  frame->format = pixfmt;
143  frame->width = w;
144  frame->height = h;
145 
146  if (av_frame_get_buffer(frame, 0) < 0) {
148  return NULL;
149  }
150 
151  return frame;
152 }
153 
154 static int config_output(AVFilterLink *outlink)
155 {
156  AVFilterContext *ctx = outlink->src;
157  SignalstatsContext *s = ctx->priv;
158  AVFilterLink *inlink = outlink->src->inputs[0];
160  s->hsub = desc->log2_chroma_w;
161  s->vsub = desc->log2_chroma_h;
162  s->depth = desc->comp[0].depth;
163  s->maxsize = 1 << s->depth;
164  s->histy = av_malloc_array(s->maxsize, sizeof(*s->histy));
165  s->histu = av_malloc_array(s->maxsize, sizeof(*s->histu));
166  s->histv = av_malloc_array(s->maxsize, sizeof(*s->histv));
167  s->histsat = av_malloc_array(s->maxsize, sizeof(*s->histsat));
168 
169  if (!s->histy || !s->histu || !s->histv || !s->histsat)
170  return AVERROR(ENOMEM);
171 
172  outlink->w = inlink->w;
173  outlink->h = inlink->h;
174 
175  s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
176  s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);
177 
178  s->fs = inlink->w * inlink->h;
179  s->cfs = s->chromaw * s->chromah;
180 
181  s->nb_jobs = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
182  s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
183  if (!s->jobs_rets)
184  return AVERROR(ENOMEM);
185 
186  s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
187  s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
188  if (!s->frame_sat || !s->frame_hue)
189  return AVERROR(ENOMEM);
190 
191  return 0;
192 }
193 
194 static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
195 {
196  const int chromax = x >> s->hsub;
197  const int chromay = y >> s->vsub;
198  f->data[0][y * f->linesize[0] + x] = s->yuv_color[0];
199  f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
200  f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
201 }
202 
203 static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
204 {
205  const int chromax = x >> s->hsub;
206  const int chromay = y >> s->vsub;
207  const int mult = 1 << (s->depth - 8);
208  AV_WN16(f->data[0] + y * f->linesize[0] + x * 2, s->yuv_color[0] * mult);
209  AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
210  AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
211 }
212 
213 static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
214 {
215  ThreadData *td = arg;
216  const SignalstatsContext *s = ctx->priv;
217  const AVFrame *in = td->in;
218  AVFrame *out = td->out;
219  const int w = in->width;
220  const int h = in->height;
221  const int slice_start = (h * jobnr ) / nb_jobs;
222  const int slice_end = (h * (jobnr+1)) / nb_jobs;
223  int x, y, score = 0;
224 
225  for (y = slice_start; y < slice_end; y++) {
226  const int yc = y >> s->vsub;
227  const uint8_t *pluma = &in->data[0][y * in->linesize[0]];
228  const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
229  const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];
230 
231  for (x = 0; x < w; x++) {
232  const int xc = x >> s->hsub;
233  const int luma = pluma[x];
234  const int chromau = pchromau[xc];
235  const int chromav = pchromav[xc];
236  const int filt = luma < 16 || luma > 235 ||
237  chromau < 16 || chromau > 240 ||
238  chromav < 16 || chromav > 240;
239  score += filt;
240  if (out && filt)
241  burn_frame8(s, out, x, y);
242  }
243  }
244  return score;
245 }
246 
247 static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
248 {
249  ThreadData *td = arg;
250  const SignalstatsContext *s = ctx->priv;
251  const AVFrame *in = td->in;
252  AVFrame *out = td->out;
253  const int mult = 1 << (s->depth - 8);
254  const int w = in->width;
255  const int h = in->height;
256  const int slice_start = (h * jobnr ) / nb_jobs;
257  const int slice_end = (h * (jobnr+1)) / nb_jobs;
258  int x, y, score = 0;
259 
260  for (y = slice_start; y < slice_end; y++) {
261  const int yc = y >> s->vsub;
262  const uint16_t *pluma = (uint16_t *)&in->data[0][y * in->linesize[0]];
263  const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
264  const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];
265 
266  for (x = 0; x < w; x++) {
267  const int xc = x >> s->hsub;
268  const int luma = pluma[x];
269  const int chromau = pchromau[xc];
270  const int chromav = pchromav[xc];
271  const int filt = luma < 16 * mult || luma > 235 * mult ||
272  chromau < 16 * mult || chromau > 240 * mult ||
273  chromav < 16 * mult || chromav > 240 * mult;
274  score += filt;
275  if (out && filt)
276  burn_frame16(s, out, x, y);
277  }
278  }
279  return score;
280 }
281 
/* Temporal-outlier test on three vertically neighboring samples: true when
 * the middle sample y deviates from both x and z while x and z roughly
 * agree — i.e. the average of |x-y| and |z-y| exceeds |z-x| by more than 4. */
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
{
    const int dxy = abs(x - y);
    const int dzy = abs(z - y);
    const int dzx = abs(z - x);

    return (dxy + dzy) / 2 - dzx > 4; // make 4 configurable?
}
286 
/* Temporal-outlier detection ("tout"), 8-bit variant: flags luma pixels
 * that spike relative to the rows above and below (typical of analog
 * dropouts). Runs as one slice job; returns the per-slice hit count.
 * NOTE: the FILTER/FILTER3 macros defined here capture the locals p, lw,
 * x and y, and are reused verbatim by filter16_tout below — keep them
 * file-visible and their operand names unchanged. */
static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h * jobnr ) / nb_jobs;
    const int slice_end = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    int lw = in->linesize[0];
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        /* first and last rows lack a vertical neighbor on one side */
        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that video format is infact interlaced.

#define FILTER(i, j) \
    filter_tout_outlier(p[(y-j) * lw + x + i], \
                        p[ y * lw + x + i], \
                        p[(y+j) * lw + x + i])

#define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))

        if (y - 2 >= 0 && y + 2 < h) {
            /* interior rows: require the outlier at both distance 1 and 2
             * so a single interlaced field doesn't trigger false hits */
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        } else {
            /* rows 1 and h-2: only the distance-1 neighbors exist */
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        }
    }
    return score;
}
334 
/* Temporal-outlier detection ("tout"), >8-bit variant. Same structure as
 * filter8_tout; reuses its FILTER3 macro, which reads the locals p, lw,
 * x and y declared here (p reinterpreted as 16-bit samples, lw halved
 * to count elements rather than bytes). */
static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h * jobnr ) / nb_jobs;
    const int slice_end = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    int lw = in->linesize[0] / 2;
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        /* first and last rows lack a vertical neighbor on one side */
        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that video format is infact interlaced.

        if (y - 2 >= 0 && y + 2 < h) {
            /* interior rows: require the outlier at distances 1 and 2 */
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        } else {
            /* rows 1 and h-2: only the distance-1 neighbors exist */
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        }
    }
    return score;
}
375 
376 #define VREP_START 4
377 
378 static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
379 {
380  ThreadData *td = arg;
381  const SignalstatsContext *s = ctx->priv;
382  const AVFrame *in = td->in;
383  AVFrame *out = td->out;
384  const int w = in->width;
385  const int h = in->height;
386  const int slice_start = (h * jobnr ) / nb_jobs;
387  const int slice_end = (h * (jobnr+1)) / nb_jobs;
388  const uint8_t *p = in->data[0];
389  const int lw = in->linesize[0];
390  int x, y, score = 0;
391 
392  for (y = slice_start; y < slice_end; y++) {
393  const int y2lw = (y - VREP_START) * lw;
394  const int ylw = y * lw;
395  int filt, totdiff = 0;
396 
397  if (y < VREP_START)
398  continue;
399 
400  for (x = 0; x < w; x++)
401  totdiff += abs(p[y2lw + x] - p[ylw + x]);
402  filt = totdiff < w;
403 
404  score += filt;
405  if (filt && out)
406  for (x = 0; x < w; x++)
407  burn_frame8(s, out, x, y);
408  }
409  return score * w;
410 }
411 
412 static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
413 {
414  ThreadData *td = arg;
415  const SignalstatsContext *s = ctx->priv;
416  const AVFrame *in = td->in;
417  AVFrame *out = td->out;
418  const int w = in->width;
419  const int h = in->height;
420  const int slice_start = (h * jobnr ) / nb_jobs;
421  const int slice_end = (h * (jobnr+1)) / nb_jobs;
422  const uint16_t *p = (uint16_t *)in->data[0];
423  const int lw = in->linesize[0] / 2;
424  int x, y, score = 0;
425 
426  for (y = slice_start; y < slice_end; y++) {
427  const int y2lw = (y - VREP_START) * lw;
428  const int ylw = y * lw;
429  int64_t totdiff = 0;
430  int filt;
431 
432  if (y < VREP_START)
433  continue;
434 
435  for (x = 0; x < w; x++)
436  totdiff += abs(p[y2lw + x] - p[ylw + x]);
437  filt = totdiff < w;
438 
439  score += filt;
440  if (filt && out)
441  for (x = 0; x < w; x++)
442  burn_frame16(s, out, x, y);
443  }
444  return score * w;
445 }
446 
/* Dispatch table for the per-frame analyses: metadata key suffix plus the
 * 8-bit and >8-bit slice-job implementations.
 * NOTE(review): entries must stay in FILTER_TOUT/VREP/BRNG enum order —
 * the enum body is not visible in this listing; verify before reordering. */
static const struct {
    const char *name;
    int (*process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
    int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} filters_def[] = {
    {"TOUT", filter8_tout, filter16_tout},
    {"VREP", filter8_vrep, filter16_vrep},
    {"BRNG", filter8_brng, filter16_brng},
    {NULL}
};
457 
/* Per-pixel chroma saturation and hue, 8-bit variant, computed for one
 * horizontal slice of the chroma planes. Saturation is the Euclidean
 * distance of (U,V) from the neutral point 128, written to the GRAY8
 * sat frame; hue is atan2(U,V) mapped to integer degrees [0,360),
 * written as int16 into the GRAY16 hue frame.
 * NOTE(review): the declaration of `td` appears to have been lost in this
 * listing (expected: `ThreadDataHueSatMetrics *td = arg;`) — verify. */
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;

    const int slice_start = (s->chromah * jobnr ) / nb_jobs;
    const int slice_end = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1];
    const int lsz_v = src->linesize[2];
    const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
    const uint8_t *p_v = src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0];
    const int lsz_hue = dst_hue->linesize[0];
    uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
    uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypotf(yuvu - 128, yuvv - 128); // int or round?
            ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-128, yuvv-128) + 180.f), 360.f);
        }
        p_u += lsz_u;
        p_v += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}
495 
/* Per-pixel chroma saturation and hue, >8-bit variant. Same computation
 * as compute_sat_hue_metrics8 with the chroma neutral point at 2^(depth-1)
 * and all planes accessed as 16-bit samples (linesizes halved to count
 * elements).
 * NOTE(review): the declaration of `td` appears to have been lost in this
 * listing (expected: `ThreadDataHueSatMetrics *td = arg;`) — verify. */
static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;
    const int mid = 1 << (s->depth - 1);

    const int slice_start = (s->chromah * jobnr ) / nb_jobs;
    const int slice_end = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1] / 2;
    const int lsz_v = src->linesize[2] / 2;
    const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
    const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0] / 2;
    const int lsz_hue = dst_hue->linesize[0] / 2;
    uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
    uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypotf(yuvu - mid, yuvv - mid); // int or round?
            ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180.f), 360.f);
        }
        p_u += lsz_u;
        p_v += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}
534 
/* Effective bit depth of a plane: the number of set bits in the OR-mask
 * accumulated over every sample (Kernighan's bit-count). */
static unsigned compute_bit_depth(uint16_t mask)
{
    unsigned bits = 0;

    while (mask) {
        mask &= mask - 1; /* clear the lowest set bit */
        bits++;
    }
    return bits;
}
539 
541 {
542  AVFilterContext *ctx = link->dst;
543  SignalstatsContext *s = ctx->priv;
544  AVFilterLink *outlink = ctx->outputs[0];
545  AVFrame *out = in;
546  int i, j;
547  int w = 0, cw = 0, // in
548  pw = 0, cpw = 0; // prev
549  int fil;
550  char metabuf[128];
551  unsigned int *histy = s->histy,
552  *histu = s->histu,
553  *histv = s->histv,
554  histhue[360] = {0},
555  *histsat = s->histsat;
556  int miny = -1, minu = -1, minv = -1;
557  int maxy = -1, maxu = -1, maxv = -1;
558  int lowy = -1, lowu = -1, lowv = -1;
559  int highy = -1, highu = -1, highv = -1;
560  int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
561  int lowp, highp, clowp, chighp;
562  int accy, accu, accv;
563  int accsat, acchue = 0;
564  int medhue, maxhue;
565  int toty = 0, totu = 0, totv = 0, totsat=0;
566  int tothue = 0;
567  int dify = 0, difu = 0, difv = 0;
568  uint16_t masky = 0, masku = 0, maskv = 0;
569  int ret;
570  int filtot[FILT_NUMB] = {0};
571  AVFrame *prev;
572 
573  AVFrame *sat = s->frame_sat;
574  AVFrame *hue = s->frame_hue;
575  const uint8_t *p_sat = sat->data[0];
576  const uint8_t *p_hue = hue->data[0];
577  const int lsz_sat = sat->linesize[0];
578  const int lsz_hue = hue->linesize[0];
579  ThreadDataHueSatMetrics td_huesat = {
580  .src = in,
581  .dst_sat = sat,
582  .dst_hue = hue,
583  };
584 
585  if (!s->frame_prev)
586  s->frame_prev = av_frame_clone(in);
587 
588  prev = s->frame_prev;
589 
590  if (s->outfilter != FILTER_NONE) {
591  out = av_frame_clone(in);
592  if (!out) {
593  av_frame_free(&in);
594  return AVERROR(ENOMEM);
595  }
597  if (ret < 0) {
598  av_frame_free(&out);
599  av_frame_free(&in);
600  return ret;
601  }
602  }
603 
605  NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
606 
607  // Calculate luma histogram and difference with previous frame or field.
608  memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
609  for (j = 0; j < link->h; j++) {
610  for (i = 0; i < link->w; i++) {
611  const int yuv = in->data[0][w + i];
612 
613  masky |= yuv;
614  histy[yuv]++;
615  dify += abs(yuv - prev->data[0][pw + i]);
616  }
617  w += in->linesize[0];
618  pw += prev->linesize[0];
619  }
620 
621  // Calculate chroma histogram and difference with previous frame or field.
622  memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
623  memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
624  memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
625  for (j = 0; j < s->chromah; j++) {
626  for (i = 0; i < s->chromaw; i++) {
627  const int yuvu = in->data[1][cw+i];
628  const int yuvv = in->data[2][cw+i];
629 
630  masku |= yuvu;
631  maskv |= yuvv;
632  histu[yuvu]++;
633  difu += abs(yuvu - prev->data[1][cpw+i]);
634  histv[yuvv]++;
635  difv += abs(yuvv - prev->data[2][cpw+i]);
636 
637  histsat[p_sat[i]]++;
638  histhue[((int16_t*)p_hue)[i]]++;
639  }
640  cw += in->linesize[1];
641  cpw += prev->linesize[1];
642  p_sat += lsz_sat;
643  p_hue += lsz_hue;
644  }
645 
646  for (fil = 0; fil < FILT_NUMB; fil ++) {
647  if (s->filters & 1<<fil) {
648  ThreadData td = {
649  .in = in,
650  .out = out != in && s->outfilter == fil ? out : NULL,
651  };
652  memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
654  &td, s->jobs_rets, s->nb_jobs);
655  for (i = 0; i < s->nb_jobs; i++)
656  filtot[fil] += s->jobs_rets[i];
657  }
658  }
659 
660  // find low / high based on histogram percentile
661  // these only need to be calculated once.
662 
663  lowp = lrint(s->fs * 10 / 100.);
664  highp = lrint(s->fs * 90 / 100.);
665  clowp = lrint(s->cfs * 10 / 100.);
666  chighp = lrint(s->cfs * 90 / 100.);
667 
668  accy = accu = accv = accsat = 0;
669  for (fil = 0; fil < s->maxsize; fil++) {
670  if (miny < 0 && histy[fil]) miny = fil;
671  if (minu < 0 && histu[fil]) minu = fil;
672  if (minv < 0 && histv[fil]) minv = fil;
673  if (minsat < 0 && histsat[fil]) minsat = fil;
674 
675  if (histy[fil]) maxy = fil;
676  if (histu[fil]) maxu = fil;
677  if (histv[fil]) maxv = fil;
678  if (histsat[fil]) maxsat = fil;
679 
680  toty += histy[fil] * fil;
681  totu += histu[fil] * fil;
682  totv += histv[fil] * fil;
683  totsat += histsat[fil] * fil;
684 
685  accy += histy[fil];
686  accu += histu[fil];
687  accv += histv[fil];
688  accsat += histsat[fil];
689 
690  if (lowy == -1 && accy >= lowp) lowy = fil;
691  if (lowu == -1 && accu >= clowp) lowu = fil;
692  if (lowv == -1 && accv >= clowp) lowv = fil;
693  if (lowsat == -1 && accsat >= clowp) lowsat = fil;
694 
695  if (highy == -1 && accy >= highp) highy = fil;
696  if (highu == -1 && accu >= chighp) highu = fil;
697  if (highv == -1 && accv >= chighp) highv = fil;
698  if (highsat == -1 && accsat >= chighp) highsat = fil;
699  }
700 
701  maxhue = histhue[0];
702  medhue = -1;
703  for (fil = 0; fil < 360; fil++) {
704  tothue += histhue[fil] * fil;
705  acchue += histhue[fil];
706 
707  if (medhue == -1 && acchue > s->cfs / 2)
708  medhue = fil;
709  if (histhue[fil] > maxhue) {
710  maxhue = histhue[fil];
711  }
712  }
713 
714  av_frame_free(&s->frame_prev);
715  s->frame_prev = av_frame_clone(in);
716 
717 #define SET_META(key, fmt, val) do { \
718  snprintf(metabuf, sizeof(metabuf), fmt, val); \
719  av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0); \
720 } while (0)
721 
722  SET_META("YMIN", "%d", miny);
723  SET_META("YLOW", "%d", lowy);
724  SET_META("YAVG", "%g", 1.0 * toty / s->fs);
725  SET_META("YHIGH", "%d", highy);
726  SET_META("YMAX", "%d", maxy);
727 
728  SET_META("UMIN", "%d", minu);
729  SET_META("ULOW", "%d", lowu);
730  SET_META("UAVG", "%g", 1.0 * totu / s->cfs);
731  SET_META("UHIGH", "%d", highu);
732  SET_META("UMAX", "%d", maxu);
733 
734  SET_META("VMIN", "%d", minv);
735  SET_META("VLOW", "%d", lowv);
736  SET_META("VAVG", "%g", 1.0 * totv / s->cfs);
737  SET_META("VHIGH", "%d", highv);
738  SET_META("VMAX", "%d", maxv);
739 
740  SET_META("SATMIN", "%d", minsat);
741  SET_META("SATLOW", "%d", lowsat);
742  SET_META("SATAVG", "%g", 1.0 * totsat / s->cfs);
743  SET_META("SATHIGH", "%d", highsat);
744  SET_META("SATMAX", "%d", maxsat);
745 
746  SET_META("HUEMED", "%d", medhue);
747  SET_META("HUEAVG", "%g", 1.0 * tothue / s->cfs);
748 
749  SET_META("YDIF", "%g", 1.0 * dify / s->fs);
750  SET_META("UDIF", "%g", 1.0 * difu / s->cfs);
751  SET_META("VDIF", "%g", 1.0 * difv / s->cfs);
752 
753  SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
754  SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
755  SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));
756 
757  for (fil = 0; fil < FILT_NUMB; fil ++) {
758  if (s->filters & 1<<fil) {
759  char metaname[128];
760  snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
761  snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
762  av_dict_set(&out->metadata, metaname, metabuf, 0);
763  }
764  }
765 
766  if (in != out)
767  av_frame_free(&in);
768  return ff_filter_frame(outlink, out);
769 }
770 
772 {
773  AVFilterContext *ctx = link->dst;
774  SignalstatsContext *s = ctx->priv;
775  AVFilterLink *outlink = ctx->outputs[0];
776  AVFrame *out = in;
777  int i, j;
778  int w = 0, cw = 0, // in
779  pw = 0, cpw = 0; // prev
780  int fil;
781  char metabuf[128];
782  unsigned int *histy = s->histy,
783  *histu = s->histu,
784  *histv = s->histv,
785  histhue[360] = {0},
786  *histsat = s->histsat;
787  int miny = -1, minu = -1, minv = -1;
788  int maxy = -1, maxu = -1, maxv = -1;
789  int lowy = -1, lowu = -1, lowv = -1;
790  int highy = -1, highu = -1, highv = -1;
791  int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
792  int lowp, highp, clowp, chighp;
793  int accy, accu, accv;
794  int accsat, acchue = 0;
795  int medhue, maxhue;
796  int64_t toty = 0, totu = 0, totv = 0, totsat=0;
797  int64_t tothue = 0;
798  int64_t dify = 0, difu = 0, difv = 0;
799  uint16_t masky = 0, masku = 0, maskv = 0;
800 
801  int filtot[FILT_NUMB] = {0};
802  AVFrame *prev;
803  int ret;
804  AVFrame *sat = s->frame_sat;
805  AVFrame *hue = s->frame_hue;
806  const uint16_t *p_sat = (uint16_t *)sat->data[0];
807  const uint16_t *p_hue = (uint16_t *)hue->data[0];
808  const int lsz_sat = sat->linesize[0] / 2;
809  const int lsz_hue = hue->linesize[0] / 2;
810  ThreadDataHueSatMetrics td_huesat = {
811  .src = in,
812  .dst_sat = sat,
813  .dst_hue = hue,
814  };
815 
816  if (!s->frame_prev)
817  s->frame_prev = av_frame_clone(in);
818 
819  prev = s->frame_prev;
820 
821  if (s->outfilter != FILTER_NONE) {
822  out = av_frame_clone(in);
823  if (!out) {
824  av_frame_free(&in);
825  return AVERROR(ENOMEM);
826  }
828  if (ret < 0) {
829  av_frame_free(&out);
830  av_frame_free(&in);
831  return ret;
832  }
833  }
834 
836  NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
837 
838  // Calculate luma histogram and difference with previous frame or field.
839  memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
840  for (j = 0; j < link->h; j++) {
841  for (i = 0; i < link->w; i++) {
842  const int yuv = AV_RN16(in->data[0] + w + i * 2);
843 
844  masky |= yuv;
845  histy[yuv]++;
846  dify += abs(yuv - (int)AV_RN16(prev->data[0] + pw + i * 2));
847  }
848  w += in->linesize[0];
849  pw += prev->linesize[0];
850  }
851 
852  // Calculate chroma histogram and difference with previous frame or field.
853  memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
854  memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
855  memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
856  for (j = 0; j < s->chromah; j++) {
857  for (i = 0; i < s->chromaw; i++) {
858  const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
859  const int yuvv = AV_RN16(in->data[2] + cw + i * 2);
860 
861  masku |= yuvu;
862  maskv |= yuvv;
863  histu[yuvu]++;
864  difu += abs(yuvu - (int)AV_RN16(prev->data[1] + cpw + i * 2));
865  histv[yuvv]++;
866  difv += abs(yuvv - (int)AV_RN16(prev->data[2] + cpw + i * 2));
867 
868  histsat[p_sat[i]]++;
869  histhue[((int16_t*)p_hue)[i]]++;
870  }
871  cw += in->linesize[1];
872  cpw += prev->linesize[1];
873  p_sat += lsz_sat;
874  p_hue += lsz_hue;
875  }
876 
877  for (fil = 0; fil < FILT_NUMB; fil ++) {
878  if (s->filters & 1<<fil) {
879  ThreadData td = {
880  .in = in,
881  .out = out != in && s->outfilter == fil ? out : NULL,
882  };
883  memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
885  &td, s->jobs_rets, s->nb_jobs);
886  for (i = 0; i < s->nb_jobs; i++)
887  filtot[fil] += s->jobs_rets[i];
888  }
889  }
890 
891  // find low / high based on histogram percentile
892  // these only need to be calculated once.
893 
894  lowp = lrint(s->fs * 10 / 100.);
895  highp = lrint(s->fs * 90 / 100.);
896  clowp = lrint(s->cfs * 10 / 100.);
897  chighp = lrint(s->cfs * 90 / 100.);
898 
899  accy = accu = accv = accsat = 0;
900  for (fil = 0; fil < s->maxsize; fil++) {
901  if (miny < 0 && histy[fil]) miny = fil;
902  if (minu < 0 && histu[fil]) minu = fil;
903  if (minv < 0 && histv[fil]) minv = fil;
904  if (minsat < 0 && histsat[fil]) minsat = fil;
905 
906  if (histy[fil]) maxy = fil;
907  if (histu[fil]) maxu = fil;
908  if (histv[fil]) maxv = fil;
909  if (histsat[fil]) maxsat = fil;
910 
911  toty += histy[fil] * fil;
912  totu += histu[fil] * fil;
913  totv += histv[fil] * fil;
914  totsat += histsat[fil] * fil;
915 
916  accy += histy[fil];
917  accu += histu[fil];
918  accv += histv[fil];
919  accsat += histsat[fil];
920 
921  if (lowy == -1 && accy >= lowp) lowy = fil;
922  if (lowu == -1 && accu >= clowp) lowu = fil;
923  if (lowv == -1 && accv >= clowp) lowv = fil;
924  if (lowsat == -1 && accsat >= clowp) lowsat = fil;
925 
926  if (highy == -1 && accy >= highp) highy = fil;
927  if (highu == -1 && accu >= chighp) highu = fil;
928  if (highv == -1 && accv >= chighp) highv = fil;
929  if (highsat == -1 && accsat >= chighp) highsat = fil;
930  }
931 
932  maxhue = histhue[0];
933  medhue = -1;
934  for (fil = 0; fil < 360; fil++) {
935  tothue += histhue[fil] * fil;
936  acchue += histhue[fil];
937 
938  if (medhue == -1 && acchue > s->cfs / 2)
939  medhue = fil;
940  if (histhue[fil] > maxhue) {
941  maxhue = histhue[fil];
942  }
943  }
944 
945  av_frame_free(&s->frame_prev);
946  s->frame_prev = av_frame_clone(in);
947 
948  SET_META("YMIN", "%d", miny);
949  SET_META("YLOW", "%d", lowy);
950  SET_META("YAVG", "%g", 1.0 * toty / s->fs);
951  SET_META("YHIGH", "%d", highy);
952  SET_META("YMAX", "%d", maxy);
953 
954  SET_META("UMIN", "%d", minu);
955  SET_META("ULOW", "%d", lowu);
956  SET_META("UAVG", "%g", 1.0 * totu / s->cfs);
957  SET_META("UHIGH", "%d", highu);
958  SET_META("UMAX", "%d", maxu);
959 
960  SET_META("VMIN", "%d", minv);
961  SET_META("VLOW", "%d", lowv);
962  SET_META("VAVG", "%g", 1.0 * totv / s->cfs);
963  SET_META("VHIGH", "%d", highv);
964  SET_META("VMAX", "%d", maxv);
965 
966  SET_META("SATMIN", "%d", minsat);
967  SET_META("SATLOW", "%d", lowsat);
968  SET_META("SATAVG", "%g", 1.0 * totsat / s->cfs);
969  SET_META("SATHIGH", "%d", highsat);
970  SET_META("SATMAX", "%d", maxsat);
971 
972  SET_META("HUEMED", "%d", medhue);
973  SET_META("HUEAVG", "%g", 1.0 * tothue / s->cfs);
974 
975  SET_META("YDIF", "%g", 1.0 * dify / s->fs);
976  SET_META("UDIF", "%g", 1.0 * difu / s->cfs);
977  SET_META("VDIF", "%g", 1.0 * difv / s->cfs);
978 
979  SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
980  SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
981  SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));
982 
983  for (fil = 0; fil < FILT_NUMB; fil ++) {
984  if (s->filters & 1<<fil) {
985  char metaname[128];
986  snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
987  snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
988  av_dict_set(&out->metadata, metaname, metabuf, 0);
989  }
990  }
991 
992  if (in != out)
993  av_frame_free(&in);
994  return ff_filter_frame(outlink, out);
995 }
996 
998 {
999  AVFilterContext *ctx = link->dst;
1000  SignalstatsContext *s = ctx->priv;
1001 
1002  if (s->depth > 8)
1003  return filter_frame16(link, in);
1004  else
1005  return filter_frame8(link, in);
1006 }
1007 
1009  {
1010  .name = "default",
1011  .type = AVMEDIA_TYPE_VIDEO,
1012  .filter_frame = filter_frame,
1013  },
1014 };
1015 
1017  {
1018  .name = "default",
1019  .config_props = config_output,
1020  .type = AVMEDIA_TYPE_VIDEO,
1021  },
1022 };
1023 
1025  .name = "signalstats",
1026  .description = "Generate statistics from video analysis.",
1027  .init = init,
1028  .uninit = uninit,
1029  .priv_size = sizeof(SignalstatsContext),
1033  .priv_class = &signalstats_class,
1034  .flags = AVFILTER_FLAG_SLICE_THREADS,
1035 };
td
#define td
Definition: regdef.h:70
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
filter_frame8
static int filter_frame8(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:540
r
const char * r
Definition: vf_curves.c:126
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
compute_sat_hue_metrics16
static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:496
SignalstatsContext::vsub
int vsub
Definition: vf_signalstats.c:42
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:216
SignalstatsContext::rgba_color
uint8_t rgba_color[4]
Definition: vf_signalstats.c:49
SignalstatsContext::chromah
int chromah
Definition: vf_signalstats.c:39
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1018
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_signalstats.c:90
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2962
SET_META
#define SET_META(key, fmt, val)
FILTER_PIXFMTS_ARRAY
#define FILTER_PIXFMTS_ARRAY(array)
Definition: internal.h:162
floorf
static __device__ float floorf(float a)
Definition: cuda_runtime.h:172
filter8_brng
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:213
atan2f
#define atan2f(y, x)
Definition: libm.h:45
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
AV_RN16
#define AV_RN16(p)
Definition: intreadwrite.h:358
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:88
SignalstatsContext::fs
int fs
Definition: vf_signalstats.c:44
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:340
pixdesc.h
AVFrame::width
int width
Definition: frame.h:412
SignalstatsContext::nb_jobs
int nb_jobs
Definition: vf_signalstats.c:51
w
uint8_t w
Definition: llviddspenc.c:38
AVOption
AVOption.
Definition: opt.h:346
b
#define b
Definition: input.c:41
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:478
VREP_START
#define VREP_START
Definition: vf_signalstats.c:376
SignalstatsContext::histv
int * histv
Definition: vf_signalstats.c:55
ThreadData::in
const AVFrame * in
Definition: vf_signalstats.c:62
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:106
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(signalstats)
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:170
av_popcount
#define av_popcount
Definition: common.h:152
ThreadData::out
AVFrame * out
Definition: af_adeclick.c:526
filter_frame16
static int filter_frame16(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:771
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:361
FLAGS
#define FLAGS
Definition: vf_signalstats.c:72
FilterMode
FilterMode
Definition: vp9.h:64
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:476
SignalstatsContext::histu
int * histu
Definition: vf_signalstats.c:55
filter16_tout
static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:335
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:462
filter_tout_outlier
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
Definition: vf_signalstats.c:282
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: vf_signalstats.c:121
FILT_NUMB
@ FILT_NUMB
Definition: vf_signalstats.c:34
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:33
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:76
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:481
AV_PIX_FMT_YUVJ411P
@ AV_PIX_FMT_YUVJ411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:283
SignalstatsContext
Definition: vf_signalstats.c:37
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_signalstats.c:107
mult
static int16_t mult(Float11 *f1, Float11 *f2)
Definition: g726.c:60
slice_start
static int slice_start(SliceContext *sc, VVCContext *s, VVCFrameContext *fc, const CodedBitstreamUnit *unit, const int is_first_slice)
Definition: vvcdec.c:685
lrint
#define lrint
Definition: tablegen.h:53
FILTER_VREP
@ FILTER_VREP
Definition: vf_signalstats.c:32
av_cold
#define av_cold
Definition: attributes.h:90
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:490
mask
static const uint16_t mask[17]
Definition: lzw.c:38
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:86
alloc_frame
static AVFrame * alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
Definition: vf_signalstats.c:136
SignalstatsContext::yuv_color
int yuv_color[3]
Definition: vf_signalstats.c:50
OFFSET
#define OFFSET(x)
Definition: vf_signalstats.c:71
SignalstatsContext::histsat
int * histsat
Definition: vf_signalstats.c:55
intreadwrite.h
s
#define s(width, name)
Definition: cbs_vp9.c:198
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:491
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:58
g
const char * g
Definition: vf_curves.c:127
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict)
Handle slice ends.
Definition: mpeg12dec.c:1717
filters
#define filters(fmt, type, inverse, clp, inverset, clip, one, clip_fn, packed)
Definition: af_crystalizer.c:54
filters.h
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:475
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:489
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:521
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:182
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:87
frame
static AVFrame * frame
Definition: demux_decode.c:54
ff_inlink_make_frame_writable
int ff_inlink_make_frame_writable(AVFilterLink *link, AVFrame **rframe)
Make sure a frame is writable.
Definition: avfilter.c:1492
arg
const char * arg
Definition: jacosubdec.c:67
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
FILTER_BRNG
@ FILTER_BRNG
Definition: vf_signalstats.c:33
burn_frame16
static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:203
filter8_tout
static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:287
ThreadDataHueSatMetrics::dst_hue
AVFrame * dst_hue
Definition: vf_signalstats.c:68
SignalstatsContext::outfilter
int outfilter
Definition: vf_signalstats.c:46
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:85
AV_OPT_TYPE_COLOR
@ AV_OPT_TYPE_COLOR
Definition: opt.h:250
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:415
AV_PIX_FMT_YUV440P10
#define AV_PIX_FMT_YUV440P10
Definition: pixfmt.h:480
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:479
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
ff_vf_signalstats
const AVFilter ff_vf_signalstats
Definition: vf_signalstats.c:1024
SignalstatsContext::hsub
int hsub
Definition: vf_signalstats.c:41
name
const char * name
Definition: vf_signalstats.c:448
signalstats_inputs
static const AVFilterPad signalstats_inputs[]
Definition: vf_signalstats.c:1008
burn_frame8
static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:194
f
f
Definition: af_crystalizer.c:121
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:997
SignalstatsContext::histy
int * histy
Definition: vf_signalstats.c:55
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:483
FILTER3
#define FILTER3(j)
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:485
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:427
signalstats_outputs
static const AVFilterPad signalstats_outputs[]
Definition: vf_signalstats.c:1016
ThreadDataHueSatMetrics::dst_sat
AVFrame * dst_sat
Definition: vf_signalstats.c:68
ThreadDataHueSatMetrics
Definition: vf_signalstats.c:66
M_PI
#define M_PI
Definition: mathematics.h:67
internal.h
compute_sat_hue_metrics8
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:458
SignalstatsContext::filters
int filters
Definition: vf_signalstats.c:47
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:255
process8
int(* process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:449
FILTER_TOUT
@ FILTER_TOUT
Definition: vf_signalstats.c:31
SignalstatsContext::frame_prev
AVFrame * frame_prev
Definition: vf_signalstats.c:48
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:825
SignalstatsContext::maxsize
int maxsize
Definition: vf_signalstats.c:54
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_signalstats.c:154
ThreadData
Used for passing data between threads.
Definition: dsddec.c:69
process16
int(* process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:450
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_PIX_FMT_YUVJ440P
@ AV_PIX_FMT_YUVJ440P
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
Definition: pixfmt.h:107
SignalstatsContext::depth
int depth
Definition: vf_signalstats.c:43
filt
static const int8_t filt[NUMTAPS *2]
Definition: af_earwax.c:39
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:39
SignalstatsContext::chromaw
int chromaw
Definition: vf_signalstats.c:40
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:477
AVFilter
Filter definition.
Definition: avfilter.h:166
filters_def
static const struct @288 filters_def[]
ret
ret
Definition: filter_design.txt:187
pixfmt
enum AVPixelFormat pixfmt
Definition: kmsgrab.c:366
SignalstatsContext::cfs
int cfs
Definition: vf_signalstats.c:45
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:482
filter16_brng
static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:247
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:487
AVFrame::height
int height
Definition: frame.h:412
SignalstatsContext::frame_sat
AVFrame * frame_sat
Definition: vf_signalstats.c:57
FILTER_NONE
@ FILTER_NONE
Definition: vf_signalstats.c:30
signalstats_options
static const AVOption signalstats_options[]
Definition: vf_signalstats.c:74
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:235
filter8_vrep
static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:378
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
AVFilterContext
An instance of a filter.
Definition: avfilter.h:407
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:117
desc
const char * desc
Definition: libsvtav1.c:73
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
filter16_vrep
static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:412
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:183
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:80
AV_OPT_TYPE_FLAGS
@ AV_OPT_TYPE_FLAGS
Definition: opt.h:234
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:385
SignalstatsContext::jobs_rets
int * jobs_rets
Definition: vf_signalstats.c:52
AV_PIX_FMT_YUV440P12
#define AV_PIX_FMT_YUV440P12
Definition: pixfmt.h:484
h
h
Definition: vp9dsp_template.c:2038
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:488
ff_filter_execute
static av_always_inline int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: internal.h:134
compute_bit_depth
static unsigned compute_bit_depth(uint16_t mask)
Definition: vf_signalstats.c:535
int
int
Definition: ffmpeg_filter.c:425
SignalstatsContext::frame_hue
AVFrame * frame_hue
Definition: vf_signalstats.c:58
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:244
snprintf
#define snprintf
Definition: snprintf.h:34
ThreadDataHueSatMetrics::src
const AVFrame * src
Definition: vf_signalstats.c:67
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:486
AV_WN16
#define AV_WN16(p, v)
Definition: intreadwrite.h:370