FFmpeg
vf_deband.c
/*
 * Copyright (c) 2015 Niklas Haas
 * Copyright (c) 2015 Paul B Mahol
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "filters.h"
#include "formats.h"
#include "video.h"

typedef struct DebandContext {
    const AVClass *class;

    int coupling;
    float threshold[4];
    int range;
    int blur;
    float direction;

    int nb_components;
    int planewidth[4];
    int planeheight[4];
    int shift[2];
    int thr[4];

    int *x_pos;
    int *y_pos;

    int (*deband)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} DebandContext;

#define OFFSET(x) offsetof(DebandContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM

static const AVOption deband_options[] = {
    { "1thr",      "set 1st plane threshold", OFFSET(threshold[0]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},    0.00003,     0.5, FLAGS },
    { "2thr",      "set 2nd plane threshold", OFFSET(threshold[1]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},    0.00003,     0.5, FLAGS },
    { "3thr",      "set 3rd plane threshold", OFFSET(threshold[2]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},    0.00003,     0.5, FLAGS },
    { "4thr",      "set 4th plane threshold", OFFSET(threshold[3]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},    0.00003,     0.5, FLAGS },
    { "range",     "set range",               OFFSET(range),        AV_OPT_TYPE_INT,   {.i64=16},      INT_MIN, INT_MAX, FLAGS },
    { "r",         "set range",               OFFSET(range),        AV_OPT_TYPE_INT,   {.i64=16},      INT_MIN, INT_MAX, FLAGS },
    { "direction", "set direction",           OFFSET(direction),    AV_OPT_TYPE_FLOAT, {.dbl=2*M_PI},  -2*M_PI,  2*M_PI, FLAGS },
    { "d",         "set direction",           OFFSET(direction),    AV_OPT_TYPE_FLOAT, {.dbl=2*M_PI},  -2*M_PI,  2*M_PI, FLAGS },
    { "blur",      "set blur",                OFFSET(blur),         AV_OPT_TYPE_BOOL,  {.i64=1},       0,             1, FLAGS },
    { "b",         "set blur",                OFFSET(blur),         AV_OPT_TYPE_BOOL,  {.i64=1},       0,             1, FLAGS },
    { "coupling",  "set plane coupling",      OFFSET(coupling),     AV_OPT_TYPE_BOOL,  {.i64=0},       0,             1, FLAGS },
    { "c",         "set plane coupling",      OFFSET(coupling),     AV_OPT_TYPE_BOOL,  {.i64=0},       0,             1, FLAGS },
    { NULL }
};
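
/*
 * Illustrative only: a hypothetical command line exercising the options above
 * (file names and values are placeholders, not taken from this source):
 *
 *     ffmpeg -i input.mkv -vf "deband=1thr=0.01:2thr=0.01:3thr=0.01:range=16:blur=1" output.mkv
 *
 * Thresholds are expressed as fractions of the full sample range; config_input()
 * below scales them to the pixel format's bit depth.
 */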

AVFILTER_DEFINE_CLASS(deband);

static int query_formats(const AVFilterContext *ctx,
                         AVFilterFormatsConfig **cfg_in,
                         AVFilterFormatsConfig **cfg_out)
{
    const DebandContext *s = ctx->priv;

    static const enum AVPixelFormat pix_fmts[] = {
        /* per-plane pixel format list elided in this listing */
    };

    static const enum AVPixelFormat cpix_fmts[] = {
        /* coupled pixel format list elided in this listing */
    };

    return ff_set_common_formats_from_list2(ctx, cfg_in, cfg_out,
                                            s->coupling ? cpix_fmts : pix_fmts);
}

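/* Cheap, stateless pseudo-random value in [0, 1): the classic
 * fract(sin(x*k1 + y*k2) * large_constant) hash, keyed on the pixel
 * coordinates so the dither pattern is reproducible per position. */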
static float frand(int x, int y)
{
    const float r = sinf(x * 12.9898f + y * 78.233f) * 43758.545f;

    return r - floorf(r);
}

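/* Unweighted mean of the four reference samples; shared by every deband path. */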
static int inline get_avg(int ref0, int ref1, int ref2, int ref3)
{
    return (ref0 + ref1 + ref2 + ref3) / 4;
}

typedef struct ThreadData {
    AVFrame *in, *out;
} ThreadData;

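/* 8-bit, independent-plane path: each sample is compared against four
 * reference samples taken at the precomputed pseudo-random offsets (mirrored
 * in x and y). With blur enabled the sample is replaced by their average when
 * it differs from that average by less than the plane threshold; otherwise
 * all four references must individually lie within the threshold. */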
static int deband_8_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    int x, y, p;

    for (p = 0; p < s->nb_components; p++) {
        const uint8_t *src_ptr = (const uint8_t *)in->data[p];
        uint8_t *dst_ptr = (uint8_t *)out->data[p];
        const int dst_linesize = out->linesize[p];
        const int src_linesize = in->linesize[p];
        const int thr = s->thr[p];
        const int start = (s->planeheight[p] *  jobnr   ) / nb_jobs;
        const int end   = (s->planeheight[p] * (jobnr+1)) / nb_jobs;
        const int w = s->planewidth[p] - 1;
        const int h = s->planeheight[p] - 1;

        for (y = start; y < end; y++) {
            const int pos = y * s->planewidth[0];

            for (x = 0; x < s->planewidth[p]; x++) {
                const int x_pos = s->x_pos[pos + x];
                const int y_pos = s->y_pos[pos + x];
                const int ref0 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                if (s->blur) {
                    const int avg = get_avg(ref0, ref1, ref2, ref3);
                    const int diff = FFABS(src0 - avg);

                    dst_ptr[y * dst_linesize + x] = diff < thr ? avg : src0;
                } else {
                    dst_ptr[y * dst_linesize + x] = (FFABS(src0 - ref0) < thr) &&
                                                    (FFABS(src0 - ref1) < thr) &&
                                                    (FFABS(src0 - ref2) < thr) &&
                                                    (FFABS(src0 - ref3) < thr) ? get_avg(ref0, ref1, ref2, ref3) : src0;
                }
            }
        }
    }

    return 0;
}

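/* 8-bit coupled path: the threshold test is evaluated on every plane of a
 * pixel first, and the averaged values are written only if all planes pass;
 * otherwise the original samples are kept on all planes. */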
static int deband_8_coupling_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int start = (s->planeheight[0] *  jobnr   ) / nb_jobs;
    const int end   = (s->planeheight[0] * (jobnr+1)) / nb_jobs;
    int x, y, p;

    for (y = start; y < end; y++) {
        const int pos = y * s->planewidth[0];

        for (x = 0; x < s->planewidth[0]; x++) {
            const int x_pos = s->x_pos[pos + x];
            const int y_pos = s->y_pos[pos + x];
            int avg[4], cmp[4] = { 0 }, src[4];

            for (p = 0; p < s->nb_components; p++) {
                const uint8_t *src_ptr = (const uint8_t *)in->data[p];
                const int src_linesize = in->linesize[p];
                const int thr = s->thr[p];
                const int w = s->planewidth[p] - 1;
                const int h = s->planeheight[p] - 1;
                const int ref0 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                src[p] = src0;
                avg[p] = get_avg(ref0, ref1, ref2, ref3);

                if (s->blur) {
                    cmp[p] = FFABS(src0 - avg[p]) < thr;
                } else {
                    cmp[p] = (FFABS(src0 - ref0) < thr) &&
                             (FFABS(src0 - ref1) < thr) &&
                             (FFABS(src0 - ref2) < thr) &&
                             (FFABS(src0 - ref3) < thr);
                }
            }

            for (p = 0; p < s->nb_components; p++)
                if (!cmp[p])
                    break;
            if (p == s->nb_components) {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p];

                    out->data[p][y * dst_linesize + x] = avg[p];
                }
            } else {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p];

                    out->data[p][y * dst_linesize + x] = src[p];
                }
            }
        }
    }

    return 0;
}

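/* 16-bit coupled path: same logic as deband_8_coupling_c, but samples are
 * uint16_t, so linesize (which is in bytes) is halved to get a stride in
 * samples. */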
static int deband_16_coupling_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int start = (s->planeheight[0] *  jobnr   ) / nb_jobs;
    const int end   = (s->planeheight[0] * (jobnr+1)) / nb_jobs;
    int x, y, p, z;

    for (y = start; y < end; y++) {
        const int pos = y * s->planewidth[0];

        for (x = 0; x < s->planewidth[0]; x++) {
            const int x_pos = s->x_pos[pos + x];
            const int y_pos = s->y_pos[pos + x];
            int avg[4], cmp[4] = { 0 }, src[4];

            for (p = 0; p < s->nb_components; p++) {
                const uint16_t *src_ptr = (const uint16_t *)in->data[p];
                const int src_linesize = in->linesize[p] / 2;
                const int thr = s->thr[p];
                const int w = s->planewidth[p] - 1;
                const int h = s->planeheight[p] - 1;
                const int ref0 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                src[p] = src0;
                avg[p] = get_avg(ref0, ref1, ref2, ref3);

                if (s->blur) {
                    cmp[p] = FFABS(src0 - avg[p]) < thr;
                } else {
                    cmp[p] = (FFABS(src0 - ref0) < thr) &&
                             (FFABS(src0 - ref1) < thr) &&
                             (FFABS(src0 - ref2) < thr) &&
                             (FFABS(src0 - ref3) < thr);
                }
            }

            for (z = 0; z < s->nb_components; z++)
                if (!cmp[z])
                    break;
            if (z == s->nb_components) {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p] / 2;
                    uint16_t *dst = (uint16_t *)out->data[p] + y * dst_linesize + x;

                    dst[0] = avg[p];
                }
            } else {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p] / 2;
                    uint16_t *dst = (uint16_t *)out->data[p] + y * dst_linesize + x;

                    dst[0] = src[p];
                }
            }
        }
    }

    return 0;
}

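/* 16-bit, independent-plane path: identical to deband_8_c except for the
 * uint16_t sample type and the byte-to-sample stride conversion. */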
static int deband_16_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    int x, y, p;

    for (p = 0; p < s->nb_components; p++) {
        const uint16_t *src_ptr = (const uint16_t *)in->data[p];
        uint16_t *dst_ptr = (uint16_t *)out->data[p];
        const int dst_linesize = out->linesize[p] / 2;
        const int src_linesize = in->linesize[p] / 2;
        const int thr = s->thr[p];
        const int start = (s->planeheight[p] *  jobnr   ) / nb_jobs;
        const int end   = (s->planeheight[p] * (jobnr+1)) / nb_jobs;
        const int w = s->planewidth[p] - 1;
        const int h = s->planeheight[p] - 1;

        for (y = start; y < end; y++) {
            const int pos = y * s->planewidth[0];

            for (x = 0; x < s->planewidth[p]; x++) {
                const int x_pos = s->x_pos[pos + x];
                const int y_pos = s->y_pos[pos + x];
                const int ref0 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                if (s->blur) {
                    const int avg = get_avg(ref0, ref1, ref2, ref3);
                    const int diff = FFABS(src0 - avg);

                    dst_ptr[y * dst_linesize + x] = diff < thr ? avg : src0;
                } else {
                    dst_ptr[y * dst_linesize + x] = (FFABS(src0 - ref0) < thr) &&
                                                    (FFABS(src0 - ref1) < thr) &&
                                                    (FFABS(src0 - ref2) < thr) &&
                                                    (FFABS(src0 - ref3) < thr) ? get_avg(ref0, ref1, ref2, ref3) : src0;
                }
            }
        }
    }

    return 0;
}

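/* Per-link setup, also re-run after runtime option changes: caches plane
 * dimensions, scales the float thresholds to the format's bit depth, selects
 * the 8- or 16-bit (optionally coupled) kernel, and fills the x_pos/y_pos
 * tables with pseudo-random sampling offsets derived from frand(), the
 * "direction" angle and the "range" radius. */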
static int config_input(AVFilterLink *inlink)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    AVFilterContext *ctx = inlink->dst;
    DebandContext *s = ctx->priv;
    const float direction = s->direction;
    const int range = s->range;
    int x, y;

    s->nb_components = desc->nb_components;

    s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
    s->planeheight[0] = s->planeheight[3] = inlink->h;
    s->planewidth[1]  = s->planewidth[2]  = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w);
    s->planewidth[0]  = s->planewidth[3]  = inlink->w;
    s->shift[0] = desc->log2_chroma_w;
    s->shift[1] = desc->log2_chroma_h;

    if (s->coupling)
        s->deband = desc->comp[0].depth > 8 ? deband_16_coupling_c : deband_8_coupling_c;
    else
        s->deband = desc->comp[0].depth > 8 ? deband_16_c : deband_8_c;

    s->thr[0] = ((1 << desc->comp[0].depth) - 1) * s->threshold[0];
    s->thr[1] = ((1 << desc->comp[1].depth) - 1) * s->threshold[1];
    s->thr[2] = ((1 << desc->comp[2].depth) - 1) * s->threshold[2];
    s->thr[3] = ((1 << desc->comp[3].depth) - 1) * s->threshold[3];

    if (!s->x_pos)
        s->x_pos = av_malloc(s->planewidth[0] * s->planeheight[0] * sizeof(*s->x_pos));
    if (!s->y_pos)
        s->y_pos = av_malloc(s->planewidth[0] * s->planeheight[0] * sizeof(*s->y_pos));
    if (!s->x_pos || !s->y_pos)
        return AVERROR(ENOMEM);

    for (y = 0; y < s->planeheight[0]; y++) {
        for (x = 0; x < s->planewidth[0]; x++) {
            const float r = frand(x, y);
            const float dir = direction < 0 ? -direction : r * direction;
            const int dist = range < 0 ? -range : r * range;

            s->x_pos[y * s->planewidth[0] + x] = cosf(dir) * dist;
            s->y_pos[y * s->planewidth[0] + x] = sinf(dir) * dist;
        }
    }

    return 0;
}

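/* Allocates the output frame, copies frame properties, and slices the work
 * across threads; the number of slice jobs is the filter thread count capped
 * by the chroma plane heights. */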
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    DebandContext *s = ctx->priv;
    AVFrame *out;
    ThreadData td;

    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        av_frame_free(&in);
        return AVERROR(ENOMEM);
    }
    av_frame_copy_props(out, in);

    td.in = in; td.out = out;
    ff_filter_execute(ctx, s->deband, &td, NULL,
                      FFMIN3(s->planeheight[1], s->planeheight[2],
                             ff_filter_get_nb_threads(ctx)));

    av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

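/* Runtime command handling: after the generic option update, config_input()
 * is re-run so the thresholds and offset tables reflect the new values. */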
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                           char *res, int res_len, int flags)
{
    int ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);

    if (ret < 0)
        return ret;

    return config_input(ctx->inputs[0]);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    DebandContext *s = ctx->priv;

    av_freep(&s->x_pos);
    av_freep(&s->y_pos);
}

static const AVFilterPad avfilter_vf_deband_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_input,
        .filter_frame = filter_frame,
    },
};

const AVFilter ff_vf_deband = {
    .name            = "deband",
    .description     = NULL_IF_CONFIG_SMALL("Debands video."),
    .priv_size       = sizeof(DebandContext),
    .priv_class      = &deband_class,
    .uninit          = uninit,
    FILTER_INPUTS(avfilter_vf_deband_inputs),
    FILTER_OUTPUTS(ff_video_default_filterpad),
    FILTER_QUERY_FUNC2(query_formats),
    .flags           = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
    .process_command = process_command,
};