FFmpeg
vf_psnr.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2011 Roger Pau Monné <roger.pau@entel.upc.edu>
3  * Copyright (c) 2011 Stefano Sabatini
4  * Copyright (c) 2013 Paul B Mahol
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * Calculate the PSNR between two input videos.
26  */
27 
28 #include "libavutil/avstring.h"
29 #include "libavutil/opt.h"
30 #include "libavutil/pixdesc.h"
31 #include "avfilter.h"
32 #include "drawutils.h"
33 #include "formats.h"
34 #include "framesync.h"
35 #include "internal.h"
36 #include "psnr.h"
37 #include "video.h"
38 
39 typedef struct PSNRContext {
40  const AVClass *class;
42  double mse, min_mse, max_mse, mse_comp[4];
43  uint64_t nb_frames;
44  FILE *stats_file;
49  int max[4], average_max;
50  int is_rgb;
52  char comps[4];
54  int planewidth[4];
55  int planeheight[4];
56  double planeweight[4];
58 } PSNRContext;
59 
60 #define OFFSET(x) offsetof(PSNRContext, x)
61 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
62 
63 static const AVOption psnr_options[] = {
64  {"stats_file", "Set file where to store per-frame difference information", OFFSET(stats_file_str), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
65  {"f", "Set file where to store per-frame difference information", OFFSET(stats_file_str), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
66  {"stats_version", "Set the format version for the stats file.", OFFSET(stats_version), AV_OPT_TYPE_INT, {.i64=1}, 1, 2, FLAGS },
67  {"output_max", "Add raw stats (max values) to the output log.", OFFSET(stats_add_max), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS},
68  { NULL }
69 };
/* Defines psnr_class and psnr_framesync_preinit used by ff_vf_psnr below
 * (this invocation was dropped by the extraction; see the class references
 * in the filter definition). */
FRAMESYNC_DEFINE_CLASS(psnr, PSNRContext, fs);
/** Square an unsigned value (wraps modulo 2^32 on overflow). */
static inline unsigned pow_2(unsigned x)
{
    return x * x;
}
77 
/**
 * Convert an accumulated MSE into a PSNR value in dB.
 *
 * @param mse       sum of per-frame mean squared errors
 * @param nb_frames number of frames the sum covers
 * @param max       peak sample value of the data
 * @return PSNR in dB (+inf when mse is 0)
 */
static inline double get_psnr(double mse, uint64_t nb_frames, int max)
{
    /* max <= 65535 here, so the square matches pow_2(max) exactly */
    const double peak_sq = (double)max * (double)max;
    const double avg_mse = mse / nb_frames;

    return 10.0 * log10(peak_sq / avg_mse);
}
82 
/**
 * Sum of squared differences between two lines of 8-bit samples.
 *
 * @param main_line first line of samples
 * @param ref_line  second line of samples
 * @param outw      number of samples to compare
 * @return sum of squared per-sample differences
 */
static uint64_t sse_line_8bit(const uint8_t *main_line, const uint8_t *ref_line, int outw)
{
    int j;
    uint64_t m2 = 0;

    /* Accumulate in 64 bits: a 32-bit accumulator could wrap for lines
     * wider than ~66000 samples with large per-sample differences. */
    for (j = 0; j < outw; j++) {
        const int d = main_line[j] - ref_line[j]; /* |d| <= 255, d*d <= 65025 */
        m2 += (uint64_t)(d * d);
    }

    return m2;
}
93 
/**
 * Sum of squared differences between two lines of 16-bit samples.
 * The byte pointers are reinterpreted as native-endian uint16_t.
 */
static uint64_t sse_line_16bit(const uint8_t *_main_line, const uint8_t *_ref_line, int outw)
{
    const uint16_t *main16 = (const uint16_t *) _main_line;
    const uint16_t *ref16  = (const uint16_t *) _ref_line;
    uint64_t sum = 0;
    int j;

    for (j = 0; j < outw; j++) {
        /* conversion to unsigned is modular, matching pow_2()'s parameter */
        const unsigned diff = main16[j] - ref16[j];
        sum += diff * diff;
    }

    return sum;
}
106 
107 static inline
109  const uint8_t *main_data[4], const int main_linesizes[4],
110  const uint8_t *ref_data[4], const int ref_linesizes[4],
111  int w, int h, double mse[4])
112 {
113  int i, c;
114 
115  for (c = 0; c < s->nb_components; c++) {
116  const int outw = s->planewidth[c];
117  const int outh = s->planeheight[c];
118  const uint8_t *main_line = main_data[c];
119  const uint8_t *ref_line = ref_data[c];
120  const int ref_linesize = ref_linesizes[c];
121  const int main_linesize = main_linesizes[c];
122  uint64_t m = 0;
123  for (i = 0; i < outh; i++) {
124  m += s->dsp.sse_line(main_line, ref_line, outw);
125  ref_line += ref_linesize;
126  main_line += main_linesize;
127  }
128  mse[c] = m / (double)(outw * outh);
129  }
130 }
131 
132 static void set_meta(AVDictionary **metadata, const char *key, char comp, float d)
133 {
134  char value[128];
135  snprintf(value, sizeof(value), "%0.2f", d);
136  if (comp) {
137  char key2[128];
138  snprintf(key2, sizeof(key2), "%s%c", key, comp);
139  av_dict_set(metadata, key2, value, 0);
140  } else {
141  av_dict_set(metadata, key, value, 0);
142  }
143 }
144 
145 static int do_psnr(FFFrameSync *fs)
146 {
147  AVFilterContext *ctx = fs->parent;
148  PSNRContext *s = ctx->priv;
149  AVFrame *master, *ref;
150  double comp_mse[4], mse = 0;
151  int ret, j, c;
152  AVDictionary **metadata;
153 
155  if (ret < 0)
156  return ret;
157  if (!ref)
158  return ff_filter_frame(ctx->outputs[0], master);
159  metadata = &master->metadata;
160 
161  compute_images_mse(s, (const uint8_t **)master->data, master->linesize,
162  (const uint8_t **)ref->data, ref->linesize,
163  master->width, master->height, comp_mse);
164 
165  for (j = 0; j < s->nb_components; j++)
166  mse += comp_mse[j] * s->planeweight[j];
167 
168  s->min_mse = FFMIN(s->min_mse, mse);
169  s->max_mse = FFMAX(s->max_mse, mse);
170 
171  s->mse += mse;
172  for (j = 0; j < s->nb_components; j++)
173  s->mse_comp[j] += comp_mse[j];
174  s->nb_frames++;
175 
176  for (j = 0; j < s->nb_components; j++) {
177  c = s->is_rgb ? s->rgba_map[j] : j;
178  set_meta(metadata, "lavfi.psnr.mse.", s->comps[j], comp_mse[c]);
179  set_meta(metadata, "lavfi.psnr.psnr.", s->comps[j], get_psnr(comp_mse[c], 1, s->max[c]));
180  }
181  set_meta(metadata, "lavfi.psnr.mse_avg", 0, mse);
182  set_meta(metadata, "lavfi.psnr.psnr_avg", 0, get_psnr(mse, 1, s->average_max));
183 
184  if (s->stats_file) {
185  if (s->stats_version == 2 && !s->stats_header_written) {
186  fprintf(s->stats_file, "psnr_log_version:2 fields:n");
187  fprintf(s->stats_file, ",mse_avg");
188  for (j = 0; j < s->nb_components; j++) {
189  fprintf(s->stats_file, ",mse_%c", s->comps[j]);
190  }
191  fprintf(s->stats_file, ",psnr_avg");
192  for (j = 0; j < s->nb_components; j++) {
193  fprintf(s->stats_file, ",psnr_%c", s->comps[j]);
194  }
195  if (s->stats_add_max) {
196  fprintf(s->stats_file, ",max_avg");
197  for (j = 0; j < s->nb_components; j++) {
198  fprintf(s->stats_file, ",max_%c", s->comps[j]);
199  }
200  }
201  fprintf(s->stats_file, "\n");
202  s->stats_header_written = 1;
203  }
204  fprintf(s->stats_file, "n:%"PRId64" mse_avg:%0.2f ", s->nb_frames, mse);
205  for (j = 0; j < s->nb_components; j++) {
206  c = s->is_rgb ? s->rgba_map[j] : j;
207  fprintf(s->stats_file, "mse_%c:%0.2f ", s->comps[j], comp_mse[c]);
208  }
209  fprintf(s->stats_file, "psnr_avg:%0.2f ", get_psnr(mse, 1, s->average_max));
210  for (j = 0; j < s->nb_components; j++) {
211  c = s->is_rgb ? s->rgba_map[j] : j;
212  fprintf(s->stats_file, "psnr_%c:%0.2f ", s->comps[j],
213  get_psnr(comp_mse[c], 1, s->max[c]));
214  }
215  if (s->stats_version == 2 && s->stats_add_max) {
216  fprintf(s->stats_file, "max_avg:%d ", s->average_max);
217  for (j = 0; j < s->nb_components; j++) {
218  c = s->is_rgb ? s->rgba_map[j] : j;
219  fprintf(s->stats_file, "max_%c:%d ", s->comps[j], s->max[c]);
220  }
221  }
222  fprintf(s->stats_file, "\n");
223  }
224 
225  return ff_filter_frame(ctx->outputs[0], master);
226 }
227 
229 {
230  PSNRContext *s = ctx->priv;
231 
232  s->min_mse = +INFINITY;
233  s->max_mse = -INFINITY;
234 
235  if (s->stats_file_str) {
236  if (s->stats_version < 2 && s->stats_add_max) {
238  "stats_add_max was specified but stats_version < 2.\n" );
239  return AVERROR(EINVAL);
240  }
241  if (!strcmp(s->stats_file_str, "-")) {
242  s->stats_file = stdout;
243  } else {
244  s->stats_file = fopen(s->stats_file_str, "w");
245  if (!s->stats_file) {
246  int err = AVERROR(errno);
247  char buf[128];
248  av_strerror(err, buf, sizeof(buf));
249  av_log(ctx, AV_LOG_ERROR, "Could not open stats file %s: %s\n",
250  s->stats_file_str, buf);
251  return err;
252  }
253  }
254  }
255 
256  s->fs.on_event = do_psnr;
257  return 0;
258 }
259 
261 {
262  static const enum AVPixelFormat pix_fmts[] = {
264 #define PF_NOALPHA(suf) AV_PIX_FMT_YUV420##suf, AV_PIX_FMT_YUV422##suf, AV_PIX_FMT_YUV444##suf
265 #define PF_ALPHA(suf) AV_PIX_FMT_YUVA420##suf, AV_PIX_FMT_YUVA422##suf, AV_PIX_FMT_YUVA444##suf
266 #define PF(suf) PF_NOALPHA(suf), PF_ALPHA(suf)
267  PF(P), PF(P9), PF(P10), PF_NOALPHA(P12), PF_NOALPHA(P14), PF(P16),
275  };
276 
278  if (!fmts_list)
279  return AVERROR(ENOMEM);
280  return ff_set_common_formats(ctx, fmts_list);
281 }
282 
284 {
286  AVFilterContext *ctx = inlink->dst;
287  PSNRContext *s = ctx->priv;
288  double average_max;
289  unsigned sum;
290  int j;
291 
292  s->nb_components = desc->nb_components;
293  if (ctx->inputs[0]->w != ctx->inputs[1]->w ||
294  ctx->inputs[0]->h != ctx->inputs[1]->h) {
295  av_log(ctx, AV_LOG_ERROR, "Width and height of input videos must be same.\n");
296  return AVERROR(EINVAL);
297  }
298  if (ctx->inputs[0]->format != ctx->inputs[1]->format) {
299  av_log(ctx, AV_LOG_ERROR, "Inputs must be of same pixel format.\n");
300  return AVERROR(EINVAL);
301  }
302 
303  s->max[0] = (1 << desc->comp[0].depth) - 1;
304  s->max[1] = (1 << desc->comp[1].depth) - 1;
305  s->max[2] = (1 << desc->comp[2].depth) - 1;
306  s->max[3] = (1 << desc->comp[3].depth) - 1;
307 
308  s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
309  s->comps[0] = s->is_rgb ? 'r' : 'y' ;
310  s->comps[1] = s->is_rgb ? 'g' : 'u' ;
311  s->comps[2] = s->is_rgb ? 'b' : 'v' ;
312  s->comps[3] = 'a';
313 
314  s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
315  s->planeheight[0] = s->planeheight[3] = inlink->h;
316  s->planewidth[1] = s->planewidth[2] = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w);
317  s->planewidth[0] = s->planewidth[3] = inlink->w;
318  sum = 0;
319  for (j = 0; j < s->nb_components; j++)
320  sum += s->planeheight[j] * s->planewidth[j];
321  average_max = 0;
322  for (j = 0; j < s->nb_components; j++) {
323  s->planeweight[j] = (double) s->planeheight[j] * s->planewidth[j] / sum;
324  average_max += s->max[j] * s->planeweight[j];
325  }
326  s->average_max = lrint(average_max);
327 
328  s->dsp.sse_line = desc->comp[0].depth > 8 ? sse_line_16bit : sse_line_8bit;
329  if (ARCH_X86)
330  ff_psnr_init_x86(&s->dsp, desc->comp[0].depth);
331 
332  return 0;
333 }
334 
335 static int config_output(AVFilterLink *outlink)
336 {
337  AVFilterContext *ctx = outlink->src;
338  PSNRContext *s = ctx->priv;
339  AVFilterLink *mainlink = ctx->inputs[0];
340  int ret;
341 
343  if (ret < 0)
344  return ret;
345  outlink->w = mainlink->w;
346  outlink->h = mainlink->h;
347  outlink->time_base = mainlink->time_base;
348  outlink->sample_aspect_ratio = mainlink->sample_aspect_ratio;
349  outlink->frame_rate = mainlink->frame_rate;
350  if ((ret = ff_framesync_configure(&s->fs)) < 0)
351  return ret;
352 
353  return 0;
354 }
355 
357 {
358  PSNRContext *s = ctx->priv;
359  return ff_framesync_activate(&s->fs);
360 }
361 
363 {
364  PSNRContext *s = ctx->priv;
365 
366  if (s->nb_frames > 0) {
367  int j;
368  char buf[256];
369 
370  buf[0] = 0;
371  for (j = 0; j < s->nb_components; j++) {
372  int c = s->is_rgb ? s->rgba_map[j] : j;
373  av_strlcatf(buf, sizeof(buf), " %c:%f", s->comps[j],
374  get_psnr(s->mse_comp[c], s->nb_frames, s->max[c]));
375  }
376  av_log(ctx, AV_LOG_INFO, "PSNR%s average:%f min:%f max:%f\n",
377  buf,
378  get_psnr(s->mse, s->nb_frames, s->average_max),
379  get_psnr(s->max_mse, 1, s->average_max),
380  get_psnr(s->min_mse, 1, s->average_max));
381  }
382 
383  ff_framesync_uninit(&s->fs);
384 
385  if (s->stats_file && s->stats_file != stdout)
386  fclose(s->stats_file);
387 }
388 
389 static const AVFilterPad psnr_inputs[] = {
390  {
391  .name = "main",
392  .type = AVMEDIA_TYPE_VIDEO,
393  },{
394  .name = "reference",
395  .type = AVMEDIA_TYPE_VIDEO,
396  .config_props = config_input_ref,
397  },
398  { NULL }
399 };
400 
401 static const AVFilterPad psnr_outputs[] = {
402  {
403  .name = "default",
404  .type = AVMEDIA_TYPE_VIDEO,
405  .config_props = config_output,
406  },
407  { NULL }
408 };
409 
411  .name = "psnr",
412  .description = NULL_IF_CONFIG_SMALL("Calculate the PSNR between two video streams."),
413  .preinit = psnr_framesync_preinit,
414  .init = init,
415  .uninit = uninit,
416  .query_formats = query_formats,
417  .activate = activate,
418  .priv_size = sizeof(PSNRContext),
419  .priv_class = &psnr_class,
420  .inputs = psnr_inputs,
422 };
AV_PIX_FMT_GBRAP16
#define AV_PIX_FMT_GBRAP16
Definition: pixfmt.h:409
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:117
PSNRContext::stats_version
int stats_version
Definition: vf_psnr.c:46
config_input_ref
static int config_input_ref(AVFilterLink *inlink)
Definition: vf_psnr.c:283
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
psnr
static double psnr(double d)
Definition: ffmpeg.c:1355
PSNRContext::max_mse
double max_mse
Definition: vf_psnr.c:42
do_psnr
static int do_psnr(FFFrameSync *fs)
Definition: vf_psnr.c:145
INFINITY
#define INFINITY
Definition: mathematics.h:67
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:283
set_meta
static void set_meta(AVDictionary **metadata, const char *key, char comp, float d)
Definition: vf_psnr.c:132
PSNRContext::max
int max[4]
Definition: vf_psnr.c:49
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:293
PSNRContext
Definition: vf_psnr.c:39
PSNRContext::planewidth
int planewidth[4]
Definition: vf_psnr.c:54
comp
static void comp(unsigned char *dst, ptrdiff_t dst_stride, unsigned char *src, ptrdiff_t src_stride, int add)
Definition: eamad.c:83
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1080
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2522
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
PSNRContext::is_rgb
int is_rgb
Definition: vf_psnr.c:50
PSNRContext::stats_file_str
char * stats_file_str
Definition: vf_psnr.c:45
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:295
pixdesc.h
w
uint8_t w
Definition: llviddspenc.c:38
AVOption
AVOption.
Definition: opt.h:246
base
uint8_t base
Definition: vp3data.h:202
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:99
max
#define max(a, b)
Definition: cuda_runtime.h:33
AVDictionary
Definition: dict.c:30
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:148
FFFrameSync
Frame sync structure.
Definition: framesync.h:146
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_psnr.c:228
video.h
av_strlcatf
size_t av_strlcatf(char *dst, size_t size, const char *fmt,...)
Definition: avstring.c:101
ff_psnr_init_x86
void ff_psnr_init_x86(PSNRDSPContext *dsp, int bpp)
Definition: vf_psnr_init.c:28
AV_PIX_FMT_GRAY9
#define AV_PIX_FMT_GRAY9
Definition: pixfmt.h:367
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
psnr_inputs
static const AVFilterPad psnr_inputs[]
Definition: vf_psnr.c:389
compute_images_mse
static void compute_images_mse(PSNRContext *s, const uint8_t *main_data[4], const int main_linesizes[4], const uint8_t *ref_data[4], const int ref_linesizes[4], int w, int h, double mse[4])
Definition: vf_psnr.c:108
AV_PIX_FMT_GBRP14
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:405
AV_PIX_FMT_GBRAP
@ AV_PIX_FMT_GBRAP
planar GBRA 4:4:4:4 32bpp
Definition: pixfmt.h:215
av_strerror
int av_strerror(int errnum, char *errbuf, size_t errbuf_size)
Put a description of the AVERROR code errnum in errbuf.
Definition: error.c:105
AV_PIX_FMT_GBRP10
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:403
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:371
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
AV_PIX_FMT_YUVJ411P
@ AV_PIX_FMT_YUVJ411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:258
lrint
#define lrint
Definition: tablegen.h:53
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
buf
void * buf
Definition: avisynth_c.h:766
av_cold
#define av_cold
Definition: attributes.h:84
ff_set_common_formats
int ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:568
psnr.h
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:79
AV_PIX_FMT_GBRAP10
#define AV_PIX_FMT_GBRAP10
Definition: pixfmt.h:407
s
#define s(width, name)
Definition: cbs_vp9.c:257
AV_PIX_FMT_GBRAP12
#define AV_PIX_FMT_GBRAP12
Definition: pixfmt.h:408
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:58
FRAMESYNC_DEFINE_CLASS
FRAMESYNC_DEFINE_CLASS(psnr, PSNRContext, fs)
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:275
PSNRContext::mse
double mse
Definition: vf_psnr.c:42
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AV_PIX_FMT_GRAY14
#define AV_PIX_FMT_GRAY14
Definition: pixfmt.h:370
key
const char * key
Definition: hwcontext_opencl.c:168
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:80
AV_PIX_FMT_GRAY10
#define AV_PIX_FMT_GRAY10
Definition: pixfmt.h:368
AV_PIX_FMT_GBRP16
#define AV_PIX_FMT_GBRP16
Definition: pixfmt.h:406
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_psnr.c:260
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
NULL
#define NULL
Definition: coverity.c:32
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:259
ff_vf_psnr
AVFilter ff_vf_psnr
Definition: vf_psnr.c:410
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:78
PSNRContext::comps
char comps[4]
Definition: vf_psnr.c:52
psnr_options
static const AVOption psnr_options[]
Definition: vf_psnr.c:63
inputs
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
Definition: filter_design.txt:243
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
AV_PIX_FMT_GBRP9
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:402
PSNRDSPContext
Definition: psnr.h:27
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
sse_line_8bit
static uint64_t sse_line_8bit(const uint8_t *main_line, const uint8_t *ref_line, int outw)
Definition: vf_psnr.c:83
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
PSNRContext::stats_header_written
int stats_header_written
Definition: vf_psnr.c:47
PSNRContext::rgba_map
uint8_t rgba_map[4]
Definition: vf_psnr.c:51
PF_NOALPHA
#define PF_NOALPHA(suf)
sse_line_16bit
static uint64_t sse_line_16bit(const uint8_t *_main_line, const uint8_t *_ref_line, int outw)
Definition: vf_psnr.c:94
desc
const char * desc
Definition: nvenc.c:68
psnr_outputs
static const AVFilterPad psnr_outputs[]
Definition: vf_psnr.c:401
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:188
ff_framesync_init_dualinput
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
Initialize a frame sync structure for dualinput.
Definition: framesync.c:361
master
const char * master
Definition: vf_curves.c:117
P
#define P
PSNRContext::nb_components
int nb_components
Definition: vf_psnr.c:53
PSNRContext::fs
FFFrameSync fs
Definition: vf_psnr.c:41
pow_2
static unsigned pow_2(unsigned base)
Definition: vf_psnr.c:73
PSNRContext::planeheight
int planeheight[4]
Definition: vf_psnr.c:55
FFMAX
#define FFMAX(a, b)
Definition: common.h:94
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_psnr.c:362
PSNRContext::min_mse
double min_mse
Definition: vf_psnr.c:42
FFMIN
#define FFMIN(a, b)
Definition: common.h:96
PF
#define PF(suf)
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:187
internal.h
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:259
AV_PIX_FMT_GBRP12
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:404
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_psnr.c:335
PSNRContext::mse_comp
double mse_comp[4]
Definition: vf_psnr.c:42
value
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default value
Definition: writing_filters.txt:86
AV_PIX_FMT_YUVJ440P
@ AV_PIX_FMT_YUVJ440P
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
Definition: pixfmt.h:100
uint8_t
uint8_t
Definition: audio_convert.c:194
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:60
PSNRContext::planeweight
double planeweight[4]
Definition: vf_psnr.c:56
PSNRContext::average_max
int average_max
Definition: vf_psnr.c:49
PSNRContext::dsp
PSNRDSPContext dsp
Definition: vf_psnr.c:57
PSNRContext::stats_add_max
int stats_add_max
Definition: vf_psnr.c:48
AVFilter
Filter definition.
Definition: avfilter.h:144
ret
ret
Definition: filter_design.txt:187
framesync.h
get_psnr
static double get_psnr(double mse, uint64_t nb_frames, int max)
Definition: vf_psnr.c:78
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:223
avfilter.h
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:107
activate
static int activate(AVFilterContext *ctx)
Definition: vf_psnr.c:356
AVFilterContext
An instance of a filter.
Definition: avfilter.h:338
AV_PIX_FMT_GBRP
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:168
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:81
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:240
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
FLAGS
#define FLAGS
Definition: vf_psnr.c:61
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:73
ff_fill_rgba_map
int ff_fill_rgba_map(uint8_t *rgba_map, enum AVPixelFormat pix_fmt)
Definition: drawutils.c:35
AV_PIX_FMT_YUV410P
@ AV_PIX_FMT_YUV410P
planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
Definition: pixfmt.h:72
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
PSNRContext::nb_frames
uint64_t nb_frames
Definition: vf_psnr.c:43
h
h
Definition: vp9dsp_template.c:2038
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:344
avstring.h
ff_framesync_dualinput_get
int ff_framesync_dualinput_get(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
Definition: framesync.c:379
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:227
AV_PIX_FMT_GRAY12
#define AV_PIX_FMT_GRAY12
Definition: pixfmt.h:369
drawutils.h
PSNRContext::stats_file
FILE * stats_file
Definition: vf_psnr.c:44
snprintf
#define snprintf
Definition: snprintf.h:34
OFFSET
#define OFFSET(x)
Definition: vf_psnr.c:60