FFmpeg
vf_fieldmatch.c
1 /*
2  * Copyright (c) 2012 Fredrik Mellbin
3  * Copyright (c) 2013 Clément Bœsch
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Fieldmatching filter, ported from VFM filter (VapourSynth) by Clément.
25  * Fredrik Mellbin is the author of the VIVTC/VFM filter, which is itself a
26  * light clone of the TIVTC/TFM (AviSynth) filter written by Kevin Stone
27  * (tritical), the original author.
28  *
29  * @see http://bengal.missouri.edu/~kes25c/
30  * @see http://www.vapoursynth.com/about/
31  */
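/*
 * Typical usage (illustrative, not part of the original source): pair
 * fieldmatch with a deinterlacer for the frames it leaves combed, then drop
 * the duplicated frames, e.g.:
 *   ffmpeg -i telecined.ts -vf fieldmatch,yadif=deint=interlaced,decimate out.mkv
 */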
32 
33 #include <inttypes.h>
34 
35 #include "libavutil/avassert.h"
36 #include "libavutil/imgutils.h"
37 #include "libavutil/opt.h"
38 #include "libavutil/timestamp.h"
39 #include "avfilter.h"
40 #include "filters.h"
41 #include "internal.h"
42 
43 #define INPUT_MAIN 0
44 #define INPUT_CLEANSRC 1
45 
46 enum fieldmatch_parity {
47  FM_PARITY_AUTO = -1, ///< parity will be guessed
48  FM_PARITY_BOTTOM = 0, ///< bottom field first
49  FM_PARITY_TOP = 1, ///< top field first
50 };
51 
52 enum matching_mode {
53  MODE_PC,
54  MODE_PC_N,
55  MODE_PC_U,
56  MODE_PC_N_UB,
57  MODE_PCN,
58  MODE_PCN_UB,
59  NB_MODE
60 };
61 
62 enum comb_matching_mode {
63  COMBMATCH_NONE,
64  COMBMATCH_SC,
65  COMBMATCH_FULL,
66  NB_COMBMATCH
67 };
68 
69 enum comb_dbg {
70  COMBDBG_NONE,
71  COMBDBG_PCN,
72  COMBDBG_PCNUB,
73  NB_COMBDBG
74 };
75 
76 typedef struct FieldMatchContext {
77  const AVClass *class;
78 
79  AVFrame *prv, *src, *nxt; ///< main sliding window of 3 frames
80  AVFrame *prv2, *src2, *nxt2; ///< sliding window of the optional second stream
81  int got_frame[2]; ///< frame request flag for each input stream
82  int hsub[2], vsub[2]; ///< chroma subsampling values
83  int bpc; ///< bytes per component
84  uint32_t eof; ///< bitmask for end of stream
85  int64_t lastscdiff;
86  int64_t lastn;
87 
88  /* options */
89  int order;
90  int ppsrc;
91  int mode; ///< matching_mode
92  int field;
93  int mchroma;
94  int y0, y1;
95  int64_t scthresh;
96  double scthresh_flt;
97  int combmatch; ///< comb_matching_mode
98  int combdbg;
99  int cthresh;
100  int chroma;
101  int blockx, blocky;
102  int combpel;
103 
104  /* misc buffers */
105  uint8_t *map_data[4];
106  int map_linesize[4];
107  uint8_t *cmask_data[4];
108  int cmask_linesize[4];
109  int *c_array;
110  int tpitchy, tpitchuv;
111  uint8_t *tbuffer;
112 } FieldMatchContext;
113 
114 #define OFFSET(x) offsetof(FieldMatchContext, x)
115 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
116 
117 static const AVOption fieldmatch_options[] = {
118  { "order", "specify the assumed field order", OFFSET(order), AV_OPT_TYPE_INT, {.i64=FM_PARITY_AUTO}, -1, 1, FLAGS, "order" },
119  { "auto", "auto detect parity", 0, AV_OPT_TYPE_CONST, {.i64=FM_PARITY_AUTO}, INT_MIN, INT_MAX, FLAGS, "order" },
120  { "bff", "assume bottom field first", 0, AV_OPT_TYPE_CONST, {.i64=FM_PARITY_BOTTOM}, INT_MIN, INT_MAX, FLAGS, "order" },
121  { "tff", "assume top field first", 0, AV_OPT_TYPE_CONST, {.i64=FM_PARITY_TOP}, INT_MIN, INT_MAX, FLAGS, "order" },
122  { "mode", "set the matching mode or strategy to use", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=MODE_PC_N}, MODE_PC, NB_MODE-1, FLAGS, "mode" },
123  { "pc", "2-way match (p/c)", 0, AV_OPT_TYPE_CONST, {.i64=MODE_PC}, INT_MIN, INT_MAX, FLAGS, "mode" },
124  { "pc_n", "2-way match + 3rd match on combed (p/c + u)", 0, AV_OPT_TYPE_CONST, {.i64=MODE_PC_N}, INT_MIN, INT_MAX, FLAGS, "mode" },
125  { "pc_u", "2-way match + 3rd match (same order) on combed (p/c + u)", 0, AV_OPT_TYPE_CONST, {.i64=MODE_PC_U}, INT_MIN, INT_MAX, FLAGS, "mode" },
126  { "pc_n_ub", "2-way match + 3rd match on combed + 4th/5th matches if still combed (p/c + u + u/b)", 0, AV_OPT_TYPE_CONST, {.i64=MODE_PC_N_UB}, INT_MIN, INT_MAX, FLAGS, "mode" },
127  { "pcn", "3-way match (p/c/n)", 0, AV_OPT_TYPE_CONST, {.i64=MODE_PCN}, INT_MIN, INT_MAX, FLAGS, "mode" },
128  { "pcn_ub", "3-way match + 4th/5th matches on combed (p/c/n + u/b)", 0, AV_OPT_TYPE_CONST, {.i64=MODE_PCN_UB}, INT_MIN, INT_MAX, FLAGS, "mode" },
129  { "ppsrc", "mark main input as a pre-processed input and activate clean source input stream", OFFSET(ppsrc), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
130  { "field", "set the field to match from", OFFSET(field), AV_OPT_TYPE_INT, {.i64=FM_PARITY_AUTO}, -1, 1, FLAGS, "field" },
131  { "auto", "automatic (same value as 'order')", 0, AV_OPT_TYPE_CONST, {.i64=FM_PARITY_AUTO}, INT_MIN, INT_MAX, FLAGS, "field" },
132  { "bottom", "bottom field", 0, AV_OPT_TYPE_CONST, {.i64=FM_PARITY_BOTTOM}, INT_MIN, INT_MAX, FLAGS, "field" },
133  { "top", "top field", 0, AV_OPT_TYPE_CONST, {.i64=FM_PARITY_TOP}, INT_MIN, INT_MAX, FLAGS, "field" },
134  { "mchroma", "set whether or not chroma is included during the match comparisons", OFFSET(mchroma), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
135  { "y0", "define an exclusion band which excludes the lines between y0 and y1 from the field matching decision", OFFSET(y0), AV_OPT_TYPE_INT, {.i64=0}, 0, INT_MAX, FLAGS },
136  { "y1", "define an exclusion band which excludes the lines between y0 and y1 from the field matching decision", OFFSET(y1), AV_OPT_TYPE_INT, {.i64=0}, 0, INT_MAX, FLAGS },
137  { "scthresh", "set scene change detection threshold", OFFSET(scthresh_flt), AV_OPT_TYPE_DOUBLE, {.dbl=12}, 0, 100, FLAGS },
138  { "combmatch", "set combmatching mode", OFFSET(combmatch), AV_OPT_TYPE_INT, {.i64=COMBMATCH_SC}, COMBMATCH_NONE, NB_COMBMATCH-1, FLAGS, "combmatching" },
139  { "none", "disable combmatching", 0, AV_OPT_TYPE_CONST, {.i64=COMBMATCH_NONE}, INT_MIN, INT_MAX, FLAGS, "combmatching" },
140  { "sc", "enable combmatching only on scene change", 0, AV_OPT_TYPE_CONST, {.i64=COMBMATCH_SC}, INT_MIN, INT_MAX, FLAGS, "combmatching" },
141  { "full", "enable combmatching all the time", 0, AV_OPT_TYPE_CONST, {.i64=COMBMATCH_FULL}, INT_MIN, INT_MAX, FLAGS, "combmatching" },
142  { "combdbg", "enable comb debug", OFFSET(combdbg), AV_OPT_TYPE_INT, {.i64=COMBDBG_NONE}, COMBDBG_NONE, NB_COMBDBG-1, FLAGS, "dbglvl" },
143  { "none", "no forced calculation", 0, AV_OPT_TYPE_CONST, {.i64=COMBDBG_NONE}, INT_MIN, INT_MAX, FLAGS, "dbglvl" },
144  { "pcn", "calculate p/c/n", 0, AV_OPT_TYPE_CONST, {.i64=COMBDBG_PCN}, INT_MIN, INT_MAX, FLAGS, "dbglvl" },
145  { "pcnub", "calculate p/c/n/u/b", 0, AV_OPT_TYPE_CONST, {.i64=COMBDBG_PCNUB}, INT_MIN, INT_MAX, FLAGS, "dbglvl" },
146  { "cthresh", "set the area combing threshold used for combed frame detection", OFFSET(cthresh), AV_OPT_TYPE_INT, {.i64= 9}, -1, 0xff, FLAGS },
147  { "chroma", "set whether or not chroma is considered in the combed frame decision", OFFSET(chroma), AV_OPT_TYPE_BOOL,{.i64= 0}, 0, 1, FLAGS },
148  { "blockx", "set the x-axis size of the window used during combed frame detection", OFFSET(blockx), AV_OPT_TYPE_INT, {.i64=16}, 4, 1<<9, FLAGS },
149  { "blocky", "set the y-axis size of the window used during combed frame detection", OFFSET(blocky), AV_OPT_TYPE_INT, {.i64=16}, 4, 1<<9, FLAGS },
150  { "combpel", "set the number of combed pixels inside any of the blocky by blockx size blocks on the frame for the frame to be detected as combed", OFFSET(combpel), AV_OPT_TYPE_INT, {.i64=80}, 0, INT_MAX, FLAGS },
151  { NULL }
152 };
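/* Example (illustrative) filtergraph string exercising some of the options
 * declared above:
 *   fieldmatch=order=tff:mode=pc_n:combmatch=full:cthresh=9
 */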
153 
154 AVFILTER_DEFINE_CLASS(fieldmatch);
155 
156 static int get_width(const FieldMatchContext *fm, const AVFrame *f, int plane, int input)
157 {
158  return plane ? AV_CEIL_RSHIFT(f->width, fm->hsub[input]) : f->width;
159 }
160 
161 static int get_height(const FieldMatchContext *fm, const AVFrame *f, int plane, int input)
162 {
163  return plane ? AV_CEIL_RSHIFT(f->height, fm->vsub[input]) : f->height;
164 }
165 
166 static int64_t luma_abs_diff(const AVFrame *f1, const AVFrame *f2)
167 {
168  int x, y;
169  const uint8_t *srcp1 = f1->data[0];
170  const uint8_t *srcp2 = f2->data[0];
171  const int src1_linesize = f1->linesize[0];
172  const int src2_linesize = f2->linesize[0];
173  const int width = f1->width;
174  const int height = f1->height;
175  int64_t acc = 0;
176 
177  for (y = 0; y < height; y++) {
178  for (x = 0; x < width; x++)
179  acc += abs(srcp1[x] - srcp2[x]);
180  srcp1 += src1_linesize;
181  srcp2 += src2_linesize;
182  }
183  return acc;
184 }
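/* Note: the sum of absolute luma differences computed above is compared
 * against scthresh (a percentage of w*h*255, see config_input()) to detect
 * scene changes in filter_frame(). */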
185 
186 static void fill_buf(uint8_t *data, int w, int h, int linesize, uint8_t v)
187 {
188  int y;
189 
190  for (y = 0; y < h; y++) {
191  memset(data, v, w);
192  data += linesize;
193  }
194 }
195 
196 static int calc_combed_score(const FieldMatchContext *fm, const AVFrame *src)
197 {
198  int x, y, plane, max_v = 0;
199  const int cthresh = fm->cthresh;
200  const int cthresh6 = cthresh * 6;
201 
202  for (plane = 0; plane < (fm->chroma ? 3 : 1); plane++) {
203  const uint8_t *srcp = src->data[plane];
204  const int src_linesize = src->linesize[plane];
205  const int width = get_width (fm, src, plane, INPUT_MAIN);
206  const int height = get_height(fm, src, plane, INPUT_MAIN);
207  uint8_t *cmkp = fm->cmask_data[plane];
208  const int cmk_linesize = fm->cmask_linesize[plane];
209 
210  if (cthresh < 0) {
211  fill_buf(cmkp, width, height, cmk_linesize, 0xff);
212  continue;
213  }
214  fill_buf(cmkp, width, height, cmk_linesize, 0);
215 
216  /* [1 -3 4 -3 1] vertical filter */
217 #define FILTER(xm2, xm1, xp1, xp2) \
218  abs( 4 * srcp[x] \
219  -3 * (srcp[x + (xm1)*src_linesize] + srcp[x + (xp1)*src_linesize]) \
220  + (srcp[x + (xm2)*src_linesize] + srcp[x + (xp2)*src_linesize])) > cthresh6
221 
222  /* first line */
223  for (x = 0; x < width; x++) {
224  const int s1 = abs(srcp[x] - srcp[x + src_linesize]);
225  if (s1 > cthresh && FILTER(2, 1, 1, 2))
226  cmkp[x] = 0xff;
227  }
228  srcp += src_linesize;
229  cmkp += cmk_linesize;
230 
231  /* second line */
232  for (x = 0; x < width; x++) {
233  const int s1 = abs(srcp[x] - srcp[x - src_linesize]);
234  const int s2 = abs(srcp[x] - srcp[x + src_linesize]);
235  if (s1 > cthresh && s2 > cthresh && FILTER(2, -1, 1, 2))
236  cmkp[x] = 0xff;
237  }
238  srcp += src_linesize;
239  cmkp += cmk_linesize;
240 
241  /* all lines minus first two and last two */
242  for (y = 2; y < height-2; y++) {
243  for (x = 0; x < width; x++) {
244  const int s1 = abs(srcp[x] - srcp[x - src_linesize]);
245  const int s2 = abs(srcp[x] - srcp[x + src_linesize]);
246  if (s1 > cthresh && s2 > cthresh && FILTER(-2, -1, 1, 2))
247  cmkp[x] = 0xff;
248  }
249  srcp += src_linesize;
250  cmkp += cmk_linesize;
251  }
252 
253  /* before-last line */
254  for (x = 0; x < width; x++) {
255  const int s1 = abs(srcp[x] - srcp[x - src_linesize]);
256  const int s2 = abs(srcp[x] - srcp[x + src_linesize]);
257  if (s1 > cthresh && s2 > cthresh && FILTER(-2, -1, 1, -2))
258  cmkp[x] = 0xff;
259  }
260  srcp += src_linesize;
261  cmkp += cmk_linesize;
262 
263  /* last line */
264  for (x = 0; x < width; x++) {
265  const int s1 = abs(srcp[x] - srcp[x - src_linesize]);
266  if (s1 > cthresh && FILTER(-2, -1, -1, -2))
267  cmkp[x] = 0xff;
268  }
269  }
270 
271  if (fm->chroma) {
272  uint8_t *cmkp = fm->cmask_data[0];
273  uint8_t *cmkpU = fm->cmask_data[1];
274  uint8_t *cmkpV = fm->cmask_data[2];
275  const int width = AV_CEIL_RSHIFT(src->width, fm->hsub[INPUT_MAIN]);
276  const int height = AV_CEIL_RSHIFT(src->height, fm->vsub[INPUT_MAIN]);
277  const int cmk_linesize = fm->cmask_linesize[0] << 1;
278  const int cmk_linesizeUV = fm->cmask_linesize[2];
279  uint8_t *cmkpp = cmkp - (cmk_linesize>>1);
280  uint8_t *cmkpn = cmkp + (cmk_linesize>>1);
281  uint8_t *cmkpnn = cmkp + cmk_linesize;
282  for (y = 1; y < height - 1; y++) {
283  cmkpp += cmk_linesize;
284  cmkp += cmk_linesize;
285  cmkpn += cmk_linesize;
286  cmkpnn += cmk_linesize;
287  cmkpV += cmk_linesizeUV;
288  cmkpU += cmk_linesizeUV;
289  for (x = 1; x < width - 1; x++) {
290 #define HAS_FF_AROUND(p, lz) (p[(x)-1 - (lz)] == 0xff || p[(x) - (lz)] == 0xff || p[(x)+1 - (lz)] == 0xff || \
291  p[(x)-1 ] == 0xff || p[(x)+1 ] == 0xff || \
292  p[(x)-1 + (lz)] == 0xff || p[(x) + (lz)] == 0xff || p[(x)+1 + (lz)] == 0xff)
293  if ((cmkpV[x] == 0xff && HAS_FF_AROUND(cmkpV, cmk_linesizeUV)) ||
294  (cmkpU[x] == 0xff && HAS_FF_AROUND(cmkpU, cmk_linesizeUV))) {
295  ((uint16_t*)cmkp)[x] = 0xffff;
296  ((uint16_t*)cmkpn)[x] = 0xffff;
297  if (y&1) ((uint16_t*)cmkpp)[x] = 0xffff;
298  else ((uint16_t*)cmkpnn)[x] = 0xffff;
299  }
300  }
301  }
302  }
303 
304  {
305  const int blockx = fm->blockx;
306  const int blocky = fm->blocky;
307  const int xhalf = blockx/2;
308  const int yhalf = blocky/2;
309  const int cmk_linesize = fm->cmask_linesize[0];
310  const uint8_t *cmkp = fm->cmask_data[0] + cmk_linesize;
311  const int width = src->width;
312  const int height = src->height;
313  const int xblocks = ((width+xhalf)/blockx) + 1;
314  const int xblocks4 = xblocks<<2;
315  const int yblocks = ((height+yhalf)/blocky) + 1;
316  int *c_array = fm->c_array;
317  const int arraysize = (xblocks*yblocks)<<2;
318  int heighta = (height/(blocky/2))*(blocky/2);
319  const int widtha = (width /(blockx/2))*(blockx/2);
320  if (heighta == height)
321  heighta = height - yhalf;
322  memset(c_array, 0, arraysize * sizeof(*c_array));
323 
324 #define C_ARRAY_ADD(v) do { \
325  const int box1 = (x / blockx) * 4; \
326  const int box2 = ((x + xhalf) / blockx) * 4; \
327  c_array[temp1 + box1 ] += v; \
328  c_array[temp1 + box2 + 1] += v; \
329  c_array[temp2 + box1 + 2] += v; \
330  c_array[temp2 + box2 + 3] += v; \
331 } while (0)
332 
333 #define VERTICAL_HALF(y_start, y_end) do { \
334  for (y = y_start; y < y_end; y++) { \
335  const int temp1 = (y / blocky) * xblocks4; \
336  const int temp2 = ((y + yhalf) / blocky) * xblocks4; \
337  for (x = 0; x < width; x++) \
338  if (cmkp[x - cmk_linesize] == 0xff && \
339  cmkp[x ] == 0xff && \
340  cmkp[x + cmk_linesize] == 0xff) \
341  C_ARRAY_ADD(1); \
342  cmkp += cmk_linesize; \
343  } \
344 } while (0)
345 
346  VERTICAL_HALF(1, yhalf);
347 
348  for (y = yhalf; y < heighta; y += yhalf) {
349  const int temp1 = (y / blocky) * xblocks4;
350  const int temp2 = ((y + yhalf) / blocky) * xblocks4;
351 
352  for (x = 0; x < widtha; x += xhalf) {
353  const uint8_t *cmkp_tmp = cmkp + x;
354  int u, v, sum = 0;
355  for (u = 0; u < yhalf; u++) {
356  for (v = 0; v < xhalf; v++)
357  if (cmkp_tmp[v - cmk_linesize] == 0xff &&
358  cmkp_tmp[v ] == 0xff &&
359  cmkp_tmp[v + cmk_linesize] == 0xff)
360  sum++;
361  cmkp_tmp += cmk_linesize;
362  }
363  if (sum)
364  C_ARRAY_ADD(sum);
365  }
366 
367  for (x = widtha; x < width; x++) {
368  const uint8_t *cmkp_tmp = cmkp + x;
369  int u, sum = 0;
370  for (u = 0; u < yhalf; u++) {
371  if (cmkp_tmp[-cmk_linesize] == 0xff &&
372  cmkp_tmp[ 0] == 0xff &&
373  cmkp_tmp[ cmk_linesize] == 0xff)
374  sum++;
375  cmkp_tmp += cmk_linesize;
376  }
377  if (sum)
378  C_ARRAY_ADD(sum);
379  }
380 
381  cmkp += cmk_linesize * yhalf;
382  }
383 
384  VERTICAL_HALF(heighta, height - 1);
385 
386  for (x = 0; x < arraysize; x++)
387  if (c_array[x] > max_v)
388  max_v = c_array[x];
389  }
390  return max_v;
391 }
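/* Summary of calc_combed_score(): a per-pixel comb mask is built with the
 * [1 -3 4 -3 1] vertical filter (cmask_data), chroma combing is optionally
 * dilated into the luma mask, and the mask is then integrated over
 * overlapping blockx x blocky windows; the returned value is the largest
 * per-window count, which callers compare against combpel. */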
392 
393 // the secret is that tbuffer is an interlaced, offset subset of all the lines
394 static void build_abs_diff_mask(const uint8_t *prvp, int prv_linesize,
395  const uint8_t *nxtp, int nxt_linesize,
396  uint8_t *tbuffer, int tbuf_linesize,
397  int width, int height)
398 {
399  int y, x;
400 
401  prvp -= prv_linesize;
402  nxtp -= nxt_linesize;
403  for (y = 0; y < height; y++) {
404  for (x = 0; x < width; x++)
405  tbuffer[x] = FFABS(prvp[x] - nxtp[x]);
406  prvp += prv_linesize;
407  nxtp += nxt_linesize;
408  tbuffer += tbuf_linesize;
409  }
410 }
411 
412 /**
413  * Build a map over which pixels differ a lot/a little
414  */
415 static void build_diff_map(FieldMatchContext *fm,
416  const uint8_t *prvp, int prv_linesize,
417  const uint8_t *nxtp, int nxt_linesize,
418  uint8_t *dstp, int dst_linesize, int height,
419  int width, int plane)
420 {
421  int x, y, u, diff, count;
422  int tpitch = plane ? fm->tpitchuv : fm->tpitchy;
423  const uint8_t *dp = fm->tbuffer + tpitch;
424 
425  build_abs_diff_mask(prvp, prv_linesize, nxtp, nxt_linesize,
426  fm->tbuffer, tpitch, width, height>>1);
427 
428  for (y = 2; y < height - 2; y += 2) {
429  for (x = 1; x < width - 1; x++) {
430  diff = dp[x];
431  if (diff > 3) {
432  for (count = 0, u = x-1; u < x+2 && count < 2; u++) {
433  count += dp[u-tpitch] > 3;
434  count += dp[u ] > 3;
435  count += dp[u+tpitch] > 3;
436  }
437  if (count > 1) {
438  dstp[x] = 1;
439  if (diff > 19) {
440  int upper = 0, lower = 0;
441  for (count = 0, u = x-1; u < x+2 && count < 6; u++) {
442  if (dp[u-tpitch] > 19) { count++; upper = 1; }
443  if (dp[u ] > 19) count++;
444  if (dp[u+tpitch] > 19) { count++; lower = 1; }
445  }
446  if (count > 3) {
447  if (upper && lower) {
448  dstp[x] |= 1<<1;
449  } else {
450  int upper2 = 0, lower2 = 0;
451  for (u = FFMAX(x-4,0); u < FFMIN(x+5,width); u++) {
452  if (y != 2 && dp[u-2*tpitch] > 19) upper2 = 1;
453  if ( dp[u- tpitch] > 19) upper = 1;
454  if ( dp[u+ tpitch] > 19) lower = 1;
455  if (y != height-4 && dp[u+2*tpitch] > 19) lower2 = 1;
456  }
457  if ((upper && (lower || upper2)) ||
458  (lower && (upper || lower2)))
459  dstp[x] |= 1<<1;
460  else if (count > 5)
461  dstp[x] |= 1<<2;
462  }
463  }
464  }
465  }
466  }
467  }
468  dp += tpitch;
469  dstp += dst_linesize;
470  }
471 }
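/* The map built above is roughly a per-pixel bitmask consumed by
 * compare_fields(): bit 0 marks a moderate temporal difference with some
 * neighbour support, bit 1 a strong difference (>19) confirmed both above
 * and below, and bit 2 a strong difference with many strong neighbours in a
 * wider horizontal window. */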
472 
473 enum { mP, mC, mN, mB, mU };
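/* Weave candidates: c keeps the current frame as-is; p/n take the matched
 * field from the previous/next frame; u/b take the opposite field from the
 * next/previous frame (see create_weave_frame()). */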
474 
475 static int get_field_base(int match, int field)
476 {
477  return match < 3 ? 2 - field : 1 + field;
478 }
479 
480 static AVFrame *select_frame(FieldMatchContext *fm, int match)
481 {
482  if (match == mP || match == mB) return fm->prv;
483  else if (match == mN || match == mU) return fm->nxt;
484  else /* match == mC */ return fm->src;
485 }
486 
487 static int compare_fields(FieldMatchContext *fm, int match1, int match2, int field)
488 {
489  int plane, ret;
490  uint64_t accumPc = 0, accumPm = 0, accumPml = 0;
491  uint64_t accumNc = 0, accumNm = 0, accumNml = 0;
492  int norm1, norm2, mtn1, mtn2;
493  float c1, c2, mr;
494  const AVFrame *src = fm->src;
495 
496  for (plane = 0; plane < (fm->mchroma ? 3 : 1); plane++) {
497  int x, y, temp1, temp2, fbase;
498  const AVFrame *prev, *next;
499  uint8_t *mapp = fm->map_data[plane];
500  int map_linesize = fm->map_linesize[plane];
501  const uint8_t *srcp = src->data[plane];
502  const int src_linesize = src->linesize[plane];
503  const int srcf_linesize = src_linesize << 1;
504  int prv_linesize, nxt_linesize;
505  int prvf_linesize, nxtf_linesize;
506  const int width = get_width (fm, src, plane, INPUT_MAIN);
507  const int height = get_height(fm, src, plane, INPUT_MAIN);
508  const int y0a = fm->y0 >> (plane ? fm->vsub[INPUT_MAIN] : 0);
509  const int y1a = fm->y1 >> (plane ? fm->vsub[INPUT_MAIN] : 0);
510  const int startx = (plane == 0 ? 8 : 8 >> fm->hsub[INPUT_MAIN]);
511  const int stopx = width - startx;
512  const uint8_t *srcpf, *srcf, *srcnf;
513  const uint8_t *prvpf, *prvnf, *nxtpf, *nxtnf;
514 
515  fill_buf(mapp, width, height, map_linesize, 0);
516 
517  /* match1 */
518  fbase = get_field_base(match1, field);
519  srcf = srcp + (fbase + 1) * src_linesize;
520  srcpf = srcf - srcf_linesize;
521  srcnf = srcf + srcf_linesize;
522  mapp = mapp + fbase * map_linesize;
523  prev = select_frame(fm, match1);
524  prv_linesize = prev->linesize[plane];
525  prvf_linesize = prv_linesize << 1;
526  prvpf = prev->data[plane] + fbase * prv_linesize; // previous frame, previous field
527  prvnf = prvpf + prvf_linesize; // previous frame, next field
528 
529  /* match2 */
530  fbase = get_field_base(match2, field);
531  next = select_frame(fm, match2);
532  nxt_linesize = next->linesize[plane];
533  nxtf_linesize = nxt_linesize << 1;
534  nxtpf = next->data[plane] + fbase * nxt_linesize; // next frame, previous field
535  nxtnf = nxtpf + nxtf_linesize; // next frame, next field
536 
537  map_linesize <<= 1;
538  if ((match1 >= 3 && field == 1) || (match1 < 3 && field != 1))
539  build_diff_map(fm, prvpf, prvf_linesize, nxtpf, nxtf_linesize,
540  mapp, map_linesize, height, width, plane);
541  else
542  build_diff_map(fm, prvnf, prvf_linesize, nxtnf, nxtf_linesize,
543  mapp + map_linesize, map_linesize, height, width, plane);
544 
545  for (y = 2; y < height - 2; y += 2) {
546  if (y0a == y1a || y < y0a || y > y1a) {
547  for (x = startx; x < stopx; x++) {
548  if (mapp[x] > 0 || mapp[x + map_linesize] > 0) {
549  temp1 = srcpf[x] + (srcf[x] << 2) + srcnf[x]; // [1 4 1]
550 
551  temp2 = abs(3 * (prvpf[x] + prvnf[x]) - temp1);
552  if (temp2 > 23 && ((mapp[x]&1) || (mapp[x + map_linesize]&1)))
553  accumPc += temp2;
554  if (temp2 > 42) {
555  if ((mapp[x]&2) || (mapp[x + map_linesize]&2))
556  accumPm += temp2;
557  if ((mapp[x]&4) || (mapp[x + map_linesize]&4))
558  accumPml += temp2;
559  }
560 
561  temp2 = abs(3 * (nxtpf[x] + nxtnf[x]) - temp1);
562  if (temp2 > 23 && ((mapp[x]&1) || (mapp[x + map_linesize]&1)))
563  accumNc += temp2;
564  if (temp2 > 42) {
565  if ((mapp[x]&2) || (mapp[x + map_linesize]&2))
566  accumNm += temp2;
567  if ((mapp[x]&4) || (mapp[x + map_linesize]&4))
568  accumNml += temp2;
569  }
570  }
571  }
572  }
573  prvpf += prvf_linesize;
574  prvnf += prvf_linesize;
575  srcpf += srcf_linesize;
576  srcf += srcf_linesize;
577  srcnf += srcf_linesize;
578  nxtpf += nxtf_linesize;
579  nxtnf += nxtf_linesize;
580  mapp += map_linesize;
581  }
582  }
583 
584  if (accumPm < 500 && accumNm < 500 && (accumPml >= 500 || accumNml >= 500) &&
585  FFMAX(accumPml,accumNml) > 3*FFMIN(accumPml,accumNml)) {
586  accumPm = accumPml;
587  accumNm = accumNml;
588  }
589 
590  norm1 = (int)((accumPc / 6.0f) + 0.5f);
591  norm2 = (int)((accumNc / 6.0f) + 0.5f);
592  mtn1 = (int)((accumPm / 6.0f) + 0.5f);
593  mtn2 = (int)((accumNm / 6.0f) + 0.5f);
594  c1 = ((float)FFMAX(norm1,norm2)) / ((float)FFMAX(FFMIN(norm1,norm2),1));
595  c2 = ((float)FFMAX(mtn1, mtn2)) / ((float)FFMAX(FFMIN(mtn1, mtn2), 1));
596  mr = ((float)FFMAX(mtn1, mtn2)) / ((float)FFMAX(FFMAX(norm1,norm2),1));
597  if (((mtn1 >= 500 || mtn2 >= 500) && (mtn1*2 < mtn2*1 || mtn2*2 < mtn1*1)) ||
598  ((mtn1 >= 1000 || mtn2 >= 1000) && (mtn1*3 < mtn2*2 || mtn2*3 < mtn1*2)) ||
599  ((mtn1 >= 2000 || mtn2 >= 2000) && (mtn1*5 < mtn2*4 || mtn2*5 < mtn1*4)) ||
600  ((mtn1 >= 4000 || mtn2 >= 4000) && c2 > c1))
601  ret = mtn1 > mtn2 ? match2 : match1;
602  else if (mr > 0.005 && FFMAX(mtn1, mtn2) > 150 && (mtn1*2 < mtn2*1 || mtn2*2 < mtn1*1))
603  ret = mtn1 > mtn2 ? match2 : match1;
604  else
605  ret = norm1 > norm2 ? match2 : match1;
606  return ret;
607 }
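/* compare_fields() accumulates, for both candidates, a "normal" difference
 * (accumPc/accumNc) and a "motion" difference (accumPm/accumNm, plus the
 * large-motion accumPml/accumNml variant), then picks the candidate with the
 * lower motion score when the scores diverge strongly, and the lower normal
 * score otherwise. */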
608 
609 static void copy_fields(const FieldMatchContext *fm, AVFrame *dst,
610  const AVFrame *src, int field, int input)
611 {
612  int plane;
613  for (plane = 0; plane < 4 && src->data[plane] && src->linesize[plane]; plane++) {
614  const int plane_h = get_height(fm, src, plane, input);
615  const int nb_copy_fields = (plane_h >> 1) + (field ? 0 : (plane_h & 1));
616  av_image_copy_plane(dst->data[plane] + field*dst->linesize[plane], dst->linesize[plane] << 1,
617  src->data[plane] + field*src->linesize[plane], src->linesize[plane] << 1,
618  get_width(fm, src, plane, input) * fm->bpc, nb_copy_fields);
619  }
620 }
621 
622 static AVFrame *create_weave_frame(AVFilterContext *ctx, int match, int field,
623  const AVFrame *prv, AVFrame *src, const AVFrame *nxt, int input)
624 {
625  AVFrame *dst;
626  FieldMatchContext *fm = ctx->priv;
627 
628  if (match == mC) {
629  dst = av_frame_clone(src);
630  } else {
631  AVFilterLink *link = input == INPUT_CLEANSRC ? ctx->outputs[0] : ctx->inputs[INPUT_MAIN];
632 
633  dst = ff_get_video_buffer(link, link->w, link->h);
634  if (!dst)
635  return NULL;
636  av_frame_copy_props(dst, src);
637 
638  switch (match) {
639  case mP: copy_fields(fm, dst, src, 1-field, input); copy_fields(fm, dst, prv, field, input); break;
640  case mN: copy_fields(fm, dst, src, 1-field, input); copy_fields(fm, dst, nxt, field, input); break;
641  case mB: copy_fields(fm, dst, src, field, input); copy_fields(fm, dst, prv, 1-field, input); break;
642  case mU: copy_fields(fm, dst, src, field, input); copy_fields(fm, dst, nxt, 1-field, input); break;
643  default: av_assert0(0);
644  }
645  }
646  return dst;
647 }
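/* create_weave_frame() returns a clone of src for the 'c' match and otherwise
 * weaves one field of src with one field of prv or nxt (see the mP/mN/mB/mU
 * cases above); copy_fields() interleaves the fields by copying every other
 * line using a doubled linesize. */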
648 
649 static int checkmm(AVFilterContext *ctx, int *combs, int m1, int m2,
650  AVFrame **gen_frames, int field)
651 {
652  const FieldMatchContext *fm = ctx->priv;
653 
654 #define LOAD_COMB(mid) do { \
655  if (combs[mid] < 0) { \
656  if (!gen_frames[mid]) \
657  gen_frames[mid] = create_weave_frame(ctx, mid, field, \
658  fm->prv, fm->src, fm->nxt, \
659  INPUT_MAIN); \
660  combs[mid] = calc_combed_score(fm, gen_frames[mid]); \
661  } \
662 } while (0)
663 
664  LOAD_COMB(m1);
665  LOAD_COMB(m2);
666 
667  if ((combs[m2] * 3 < combs[m1] || (combs[m2] * 2 < combs[m1] && combs[m1] > fm->combpel)) &&
668  abs(combs[m2] - combs[m1]) >= 30 && combs[m2] < fm->combpel)
669  return m2;
670  else
671  return m1;
672 }
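/* checkmm() lazily computes the combed score of each candidate (weaving the
 * frame only when it has not been generated yet) and switches from m1 to m2
 * only when m2 is clearly less combed and below the combpel threshold. */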
673 
674 static const int fxo0m[] = { mP, mC, mN, mB, mU };
675 static const int fxo1m[] = { mN, mC, mP, mU, mB };
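/* fxo maps the logical p/c/n/u/b slots to concrete candidates: when the
 * matched field and the assumed order disagree (field ^ order), the roles of
 * previous/next (and u/b) are swapped. */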
676 
677 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
678 {
679  AVFilterContext *ctx = inlink->dst;
680  AVFilterLink *outlink = ctx->outputs[0];
681  FieldMatchContext *fm = ctx->priv;
682  int combs[] = { -1, -1, -1, -1, -1 };
683  int order, field, i, match, interlaced_frame, sc = 0, ret = 0;
684  const int *fxo;
685  AVFrame *gen_frames[] = { NULL, NULL, NULL, NULL, NULL };
686  AVFrame *dst = NULL;
687 
688  /* update frames queue(s) */
689 #define SLIDING_FRAME_WINDOW(prv, src, nxt) do { \
690  if (prv != src) /* 2nd loop exception (1st has prv==src and we don't want to lose src) */ \
691  av_frame_free(&prv); \
692  prv = src; \
693  src = nxt; \
694  if (in) \
695  nxt = in; \
696  if (!prv) \
697  prv = src; \
698  if (!prv) /* received only one frame at that point */ \
699  return 0; \
700  av_assert0(prv && src && nxt); \
701 } while (0)
702  if (FF_INLINK_IDX(inlink) == INPUT_MAIN) {
703  av_assert0(fm->got_frame[INPUT_MAIN] == 0);
704  SLIDING_FRAME_WINDOW(fm->prv, fm->src, fm->nxt);
705  fm->got_frame[INPUT_MAIN] = 1;
706  } else {
707  av_assert0(fm->got_frame[INPUT_CLEANSRC] == 0);
708  SLIDING_FRAME_WINDOW(fm->prv2, fm->src2, fm->nxt2);
709  fm->got_frame[INPUT_CLEANSRC] = 1;
710  }
711  if (!fm->got_frame[INPUT_MAIN] || (fm->ppsrc && !fm->got_frame[INPUT_CLEANSRC]))
712  return 0;
713  if (!in)
714  in = fm->src;
715 
716  /* parity */
717  order = fm->order != FM_PARITY_AUTO ? fm->order : (in->interlaced_frame ? in->top_field_first : 1);
718  field = fm->field != FM_PARITY_AUTO ? fm->field : order;
719  av_assert0(order == 0 || order == 1 || field == 0 || field == 1);
720  fxo = field ^ order ? fxo1m : fxo0m;
721 
722  /* debug mode: we generate all the fields combinations and their associated
723  * combed score. XXX: inject as frame metadata? */
724  if (fm->combdbg) {
725  for (i = 0; i < FF_ARRAY_ELEMS(combs); i++) {
726  if (i > mN && fm->combdbg == COMBDBG_PCN)
727  break;
728  gen_frames[i] = create_weave_frame(ctx, i, field, fm->prv, fm->src, fm->nxt, INPUT_MAIN);
729  if (!gen_frames[i]) {
730  ret = AVERROR(ENOMEM);
731  goto fail;
732  }
733  combs[i] = calc_combed_score(fm, gen_frames[i]);
734  }
735  av_log(ctx, AV_LOG_INFO, "COMBS: %3d %3d %3d %3d %3d\n",
736  combs[0], combs[1], combs[2], combs[3], combs[4]);
737  } else {
738  gen_frames[mC] = av_frame_clone(fm->src);
739  if (!gen_frames[mC]) {
740  ret = AVERROR(ENOMEM);
741  goto fail;
742  }
743  }
744 
745  /* p/c selection and optional 3-way p/c/n matches */
746  match = compare_fields(fm, fxo[mC], fxo[mP], field);
747  if (fm->mode == MODE_PCN || fm->mode == MODE_PCN_UB)
748  match = compare_fields(fm, match, fxo[mN], field);
749 
750  /* scene change check */
751  if (fm->combmatch == COMBMATCH_SC) {
752  if (fm->lastn == outlink->frame_count_in - 1) {
753  if (fm->lastscdiff > fm->scthresh)
754  sc = 1;
755  } else if (luma_abs_diff(fm->prv, fm->src) > fm->scthresh) {
756  sc = 1;
757  }
758 
759  if (!sc) {
760  fm->lastn = outlink->frame_count_in;
761  fm->lastscdiff = luma_abs_diff(fm->src, fm->nxt);
762  sc = fm->lastscdiff > fm->scthresh;
763  }
764  }
765 
766  if (fm->combmatch == COMBMATCH_FULL || (fm->combmatch == COMBMATCH_SC && sc)) {
767  switch (fm->mode) {
768  /* 2-way p/c matches */
769  case MODE_PC:
770  match = checkmm(ctx, combs, match, match == fxo[mP] ? fxo[mC] : fxo[mP], gen_frames, field);
771  break;
772  case MODE_PC_N:
773  match = checkmm(ctx, combs, match, fxo[mN], gen_frames, field);
774  break;
775  case MODE_PC_U:
776  match = checkmm(ctx, combs, match, fxo[mU], gen_frames, field);
777  break;
778  case MODE_PC_N_UB:
779  match = checkmm(ctx, combs, match, fxo[mN], gen_frames, field);
780  match = checkmm(ctx, combs, match, fxo[mU], gen_frames, field);
781  match = checkmm(ctx, combs, match, fxo[mB], gen_frames, field);
782  break;
783  /* 3-way p/c/n matches */
784  case MODE_PCN:
785  match = checkmm(ctx, combs, match, match == fxo[mP] ? fxo[mC] : fxo[mP], gen_frames, field);
786  break;
787  case MODE_PCN_UB:
788  match = checkmm(ctx, combs, match, fxo[mU], gen_frames, field);
789  match = checkmm(ctx, combs, match, fxo[mB], gen_frames, field);
790  break;
791  default:
792  av_assert0(0);
793  }
794  }
795 
796  /* keep fields as-is if not matched properly */
797  interlaced_frame = combs[match] >= fm->combpel;
798  if (interlaced_frame && fm->combmatch == COMBMATCH_FULL) {
799  match = mC;
800  }
801 
802  /* get output frame and drop the others */
803  if (fm->ppsrc) {
804  /* field matching was based on a filtered/post-processed input, we now
805  * pick the untouched fields from the clean source */
806  dst = create_weave_frame(ctx, match, field, fm->prv2, fm->src2, fm->nxt2, INPUT_CLEANSRC);
807  } else {
808  if (!gen_frames[match]) { // XXX: is that possible?
809  dst = create_weave_frame(ctx, match, field, fm->prv, fm->src, fm->nxt, INPUT_MAIN);
810  } else {
811  dst = gen_frames[match];
812  gen_frames[match] = NULL;
813  }
814  }
815  if (!dst) {
816  ret = AVERROR(ENOMEM);
817  goto fail;
818  }
819 
820  /* mark the frame we were unable to match properly as interlaced so a proper
821  * de-interlacer can take over */
822  dst->interlaced_frame = interlaced_frame;
823  if (dst->interlaced_frame) {
824  av_log(ctx, AV_LOG_WARNING, "Frame #%"PRId64" at %s is still interlaced\n",
825  outlink->frame_count_in, av_ts2timestr(in->pts, &inlink->time_base));
826  dst->top_field_first = field;
827  }
828 
829  av_log(ctx, AV_LOG_DEBUG, "SC:%d | COMBS: %3d %3d %3d %3d %3d (combpel=%d)"
830  " match=%d combed=%s\n", sc, combs[0], combs[1], combs[2], combs[3], combs[4],
831  fm->combpel, match, dst->interlaced_frame ? "YES" : "NO");
832 
833 fail:
834  for (i = 0; i < FF_ARRAY_ELEMS(gen_frames); i++)
835  av_frame_free(&gen_frames[i]);
836 
837  if (ret >= 0)
838  return ff_filter_frame(outlink, dst);
839  return ret;
840 }
841 
842 static int activate(AVFilterContext *ctx)
843 {
844  FieldMatchContext *fm = ctx->priv;
845  AVFrame *frame = NULL;
846  int ret = 0, status;
847  int64_t pts;
848 
849  FF_FILTER_FORWARD_STATUS_BACK_ALL(ctx->outputs[0], ctx);
850 
851  if ((fm->got_frame[INPUT_MAIN] == 0) &&
852  (ret = ff_inlink_consume_frame(ctx->inputs[INPUT_MAIN], &frame)) > 0) {
853  ret = filter_frame(ctx->inputs[INPUT_MAIN], frame);
854  if (ret < 0)
855  return ret;
856  }
857  if (ret < 0)
858  return ret;
859  if (fm->ppsrc &&
860  (fm->got_frame[INPUT_CLEANSRC] == 0) &&
861  (ret = ff_inlink_consume_frame(ctx->inputs[INPUT_CLEANSRC], &frame)) > 0) {
862  ret = filter_frame(ctx->inputs[INPUT_CLEANSRC], frame);
863  if (ret < 0)
864  return ret;
865  }
866  if (ret < 0) {
867  return ret;
868  } else if (ff_inlink_acknowledge_status(ctx->inputs[INPUT_MAIN], &status, &pts)) {
869  if (status == AVERROR_EOF) { // flushing
870  fm->eof |= 1 << INPUT_MAIN;
871  ret = filter_frame(ctx->inputs[INPUT_MAIN], NULL);
872  }
873  ff_outlink_set_status(ctx->outputs[0], status, pts);
874  return ret;
875  } else if (fm->ppsrc && ff_inlink_acknowledge_status(ctx->inputs[INPUT_CLEANSRC], &status, &pts)) {
876  if (status == AVERROR_EOF) { // flushing
877  fm->eof |= 1 << INPUT_CLEANSRC;
878  ret = filter_frame(ctx->inputs[INPUT_CLEANSRC], NULL);
879  }
880  ff_outlink_set_status(ctx->outputs[0], status, pts);
881  return ret;
882  } else {
883  if (ff_outlink_frame_wanted(ctx->outputs[0])) {
884  if (fm->got_frame[INPUT_MAIN] == 0)
885  ff_inlink_request_frame(ctx->inputs[INPUT_MAIN]);
886  if (fm->ppsrc && (fm->got_frame[INPUT_CLEANSRC] == 0))
887  ff_inlink_request_frame(ctx->inputs[INPUT_CLEANSRC]);
888  }
889  return 0;
890  }
891 }
892 
893 static int query_formats(AVFilterContext *ctx)
894 {
895  FieldMatchContext *fm = ctx->priv;
896 
897  static const enum AVPixelFormat pix_fmts[] = {
901  };
902  static const enum AVPixelFormat unproc_pix_fmts[] = {
917  };
918  int ret;
919 
920  AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
921  if (!fmts_list)
922  return AVERROR(ENOMEM);
923  if (!fm->ppsrc) {
924  return ff_set_common_formats(ctx, fmts_list);
925  }
926 
927  if ((ret = ff_formats_ref(fmts_list, &ctx->inputs[INPUT_MAIN]->outcfg.formats)) < 0)
928  return ret;
929  fmts_list = ff_make_format_list(unproc_pix_fmts);
930  if (!fmts_list)
931  return AVERROR(ENOMEM);
932  if ((ret = ff_formats_ref(fmts_list, &ctx->outputs[0]->incfg.formats)) < 0)
933  return ret;
934  if ((ret = ff_formats_ref(fmts_list, &ctx->inputs[INPUT_CLEANSRC]->outcfg.formats)) < 0)
935  return ret;
936  return 0;
937 }
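/* Note: with ppsrc enabled the (possibly pre-processed) main input only
 * drives the matching decision, so it may use a different pixel format list
 * than the clean source input and the output, which must agree with each
 * other. */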
938 
939 static int config_input(AVFilterLink *inlink)
940 {
941  int ret;
942  AVFilterContext *ctx = inlink->dst;
943  FieldMatchContext *fm = ctx->priv;
944  const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format);
945  const int w = inlink->w;
946  const int h = inlink->h;
947 
948  fm->scthresh = (int64_t)((w * h * 255.0 * fm->scthresh_flt) / 100.0);
949 
950  if ((ret = av_image_alloc(fm->map_data, fm->map_linesize, w, h, inlink->format, 32)) < 0 ||
951  (ret = av_image_alloc(fm->cmask_data, fm->cmask_linesize, w, h, inlink->format, 32)) < 0)
952  return ret;
953 
954  fm->hsub[INPUT_MAIN] = pix_desc->log2_chroma_w;
955  fm->vsub[INPUT_MAIN] = pix_desc->log2_chroma_h;
956  if (fm->ppsrc) {
957  pix_desc = av_pix_fmt_desc_get(ctx->inputs[INPUT_CLEANSRC]->format);
958  fm->hsub[INPUT_CLEANSRC] = pix_desc->log2_chroma_w;
959  fm->vsub[INPUT_CLEANSRC] = pix_desc->log2_chroma_h;
960  }
961 
962  fm->tpitchy = FFALIGN(w, 16);
963  fm->tpitchuv = FFALIGN(w >> 1, 16);
964 
965  fm->tbuffer = av_calloc((h/2 + 4) * fm->tpitchy, sizeof(*fm->tbuffer));
966  fm->c_array = av_malloc_array((((w + fm->blockx/2)/fm->blockx)+1) *
967  (((h + fm->blocky/2)/fm->blocky)+1),
968  4 * sizeof(*fm->c_array));
969  if (!fm->tbuffer || !fm->c_array)
970  return AVERROR(ENOMEM);
971 
972  return 0;
973 }
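/* Worked example (illustrative): for a 720x480 stream with the default
 * scthresh of 12%, the absolute scene-change threshold computed above is
 * 720 * 480 * 255 * 12 / 100 ~= 10.6e6, which luma_abs_diff() must exceed
 * for a scene change to be flagged. */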
974 
975 static av_cold int fieldmatch_init(AVFilterContext *ctx)
976 {
977  const FieldMatchContext *fm = ctx->priv;
978  AVFilterPad pad = {
979  .name = "main",
980  .type = AVMEDIA_TYPE_VIDEO,
981  .config_props = config_input,
982  };
983  int ret;
984 
985  if ((ret = ff_append_inpad(ctx, &pad)) < 0)
986  return ret;
987 
988  if (fm->ppsrc) {
989  pad.name = "clean_src";
990  pad.config_props = NULL;
991  if ((ret = ff_append_inpad(ctx, &pad)) < 0)
992  return ret;
993  }
994 
995  if ((fm->blockx & (fm->blockx - 1)) ||
996  (fm->blocky & (fm->blocky - 1))) {
997  av_log(ctx, AV_LOG_ERROR, "blockx and blocky settings must be power of two\n");
998  return AVERROR(EINVAL);
999  }
1000 
1001  if (fm->combpel > fm->blockx * fm->blocky) {
1002  av_log(ctx, AV_LOG_ERROR, "Combed pixel should not be larger than blockx x blocky\n");
1003  return AVERROR(EINVAL);
1004  }
1005 
1006  return 0;
1007 }
1008 
1009 static av_cold void fieldmatch_uninit(AVFilterContext *ctx)
1010 {
1011  FieldMatchContext *fm = ctx->priv;
1012 
1013  if (fm->prv != fm->src)
1014  av_frame_free(&fm->prv);
1015  if (fm->nxt != fm->src)
1016  av_frame_free(&fm->nxt);
1017  if (fm->prv2 != fm->src2)
1018  av_frame_free(&fm->prv2);
1019  if (fm->nxt2 != fm->src2)
1020  av_frame_free(&fm->nxt2);
1021  av_frame_free(&fm->src);
1022  av_frame_free(&fm->src2);
1023  av_freep(&fm->map_data[0]);
1024  av_freep(&fm->cmask_data[0]);
1025  av_freep(&fm->tbuffer);
1026  av_freep(&fm->c_array);
1027 }
1028 
1029 static int config_output(AVFilterLink *outlink)
1030 {
1031  AVFilterContext *ctx = outlink->src;
1032  FieldMatchContext *fm = ctx->priv;
1033  const AVFilterLink *inlink =
1034  ctx->inputs[fm->ppsrc ? INPUT_CLEANSRC : INPUT_MAIN];
1035  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
1036 
1037  fm->bpc = (desc->comp[0].depth + 7) / 8;
1038  outlink->time_base = inlink->time_base;
1039  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
1040  outlink->frame_rate = inlink->frame_rate;
1041  outlink->w = inlink->w;
1042  outlink->h = inlink->h;
1043  return 0;
1044 }
1045 
1046 static const AVFilterPad fieldmatch_outputs[] = {
1047  {
1048  .name = "default",
1049  .type = AVMEDIA_TYPE_VIDEO,
1050  .config_props = config_output,
1051  },
1052 };
1053 
1054 const AVFilter ff_vf_fieldmatch = {
1055  .name = "fieldmatch",
1056  .description = NULL_IF_CONFIG_SMALL("Field matching for inverse telecine."),
1057  .priv_size = sizeof(FieldMatchContext),
1058  .init = fieldmatch_init,
1059  .activate = activate,
1060  .uninit = fieldmatch_uninit,
1061  .inputs = NULL,
1062  FILTER_OUTPUTS(fieldmatch_outputs),
1063  FILTER_QUERY_FUNC(query_formats),
1064  .priv_class = &fieldmatch_class,
1065  .flags = AVFILTER_FLAG_DYNAMIC_INPUTS,
1066 };