FFmpeg
vf_deband.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2015 Niklas Haas
3  * Copyright (c) 2015 Paul B Mahol
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a copy
6  * of this software and associated documentation files (the "Software"), to deal
7  * in the Software without restriction, including without limitation the rights
8  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9  * copies of the Software, and to permit persons to whom the Software is
10  * furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included in
13  * all copies or substantial portions of the Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21  * SOFTWARE.
22  */
23 
24 #include "libavutil/opt.h"
25 #include "libavutil/pixdesc.h"
26 #include "avfilter.h"
27 #include "internal.h"
28 #include "video.h"
29 
30 typedef struct DebandContext {
31  const AVClass *class;
32 
33  int coupling;
34  float threshold[4];
35  int range;
36  int blur;
37  float direction;
38 
40  int planewidth[4];
41  int planeheight[4];
42  int shift[2];
43  int thr[4];
44 
45  int *x_pos;
46  int *y_pos;
47 
48  int (*deband)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
50 
51 #define OFFSET(x) offsetof(DebandContext, x)
52 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
53 
/*
 * Filter options.  The per-plane thresholds are fractions of the full
 * sample range (scaled to the integer thr[] values at config time).
 * "range" is the maximum pseudo-random reference-offset distance; a
 * negative value selects a fixed distance of -range.  "direction" is an
 * angle in radians, likewise fixed when negative.
 */
static const AVOption deband_options[] = {
    { "1thr",      "set 1st plane threshold", OFFSET(threshold[0]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},     0.00003,     0.5, FLAGS },
    { "2thr",      "set 2nd plane threshold", OFFSET(threshold[1]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},     0.00003,     0.5, FLAGS },
    { "3thr",      "set 3rd plane threshold", OFFSET(threshold[2]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},     0.00003,     0.5, FLAGS },
    { "4thr",      "set 4th plane threshold", OFFSET(threshold[3]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},     0.00003,     0.5, FLAGS },
    { "range",     "set range",               OFFSET(range),        AV_OPT_TYPE_INT,   {.i64=16},       INT_MIN, INT_MAX, FLAGS },
    { "r",         "set range",               OFFSET(range),        AV_OPT_TYPE_INT,   {.i64=16},       INT_MIN, INT_MAX, FLAGS },
    { "direction", "set direction",           OFFSET(direction),    AV_OPT_TYPE_FLOAT, {.dbl=2*M_PI},   -2*M_PI,  2*M_PI, FLAGS },
    { "d",         "set direction",           OFFSET(direction),    AV_OPT_TYPE_FLOAT, {.dbl=2*M_PI},   -2*M_PI,  2*M_PI, FLAGS },
    { "blur",      "set blur",                OFFSET(blur),         AV_OPT_TYPE_BOOL,  {.i64=1},        0,        1,      FLAGS },
    { "b",         "set blur",                OFFSET(blur),         AV_OPT_TYPE_BOOL,  {.i64=1},        0,        1,      FLAGS },
    { "coupling",  "set plane coupling",      OFFSET(coupling),     AV_OPT_TYPE_BOOL,  {.i64=0},        0,        1,      FLAGS },
    { "c",         "set plane coupling",      OFFSET(coupling),     AV_OPT_TYPE_BOOL,  {.i64=0},        0,        1,      FLAGS },
    { NULL }
};
69 
70 AVFILTER_DEFINE_CLASS(deband);
71 
73 {
74  DebandContext *s = ctx->priv;
75 
76  static const enum AVPixelFormat pix_fmts[] = {
95  };
96 
97  static const enum AVPixelFormat cpix_fmts[] = {
108  };
109 
110  AVFilterFormats *fmts_list = ff_make_format_list(s->coupling ? cpix_fmts : pix_fmts);
111  if (!fmts_list)
112  return AVERROR(ENOMEM);
113 
114  return ff_set_common_formats(ctx, fmts_list);
115 }
116 
117 static float frand(int x, int y)
118 {
119  const float r = sinf(x * 12.9898 + y * 78.233) * 43758.545;
120 
121  return r - floorf(r);
122 }
123 
/**
 * Average of the four reference samples around a pixel.
 *
 * Integer division truncates; all call sites pass non-negative sample
 * values read from 8- or 16-bit planes, so this is a plain floor average.
 * Note: `static inline int` is the conventional specifier order (the
 * original `static int inline` was legal but unidiomatic).
 */
static inline int get_avg(int ref0, int ref1, int ref2, int ref3)
{
    return (ref0 + ref1 + ref2 + ref3) / 4;
}
128 
/* Per-frame payload handed to the slice-threaded deband workers. */
typedef struct ThreadData {
    AVFrame *in, *out;  /* source frame / destination frame */
} ThreadData;
132 
133 static int deband_8_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
134 {
135  DebandContext *s = ctx->priv;
136  ThreadData *td = arg;
137  AVFrame *in = td->in;
138  AVFrame *out = td->out;
139  int x, y, p;
140 
141  for (p = 0; p < s->nb_components; p++) {
142  const uint8_t *src_ptr = (const uint8_t *)in->data[p];
143  uint8_t *dst_ptr = (uint8_t *)out->data[p];
144  const int dst_linesize = out->linesize[p];
145  const int src_linesize = in->linesize[p];
146  const int thr = s->thr[p];
147  const int start = (s->planeheight[p] * jobnr ) / nb_jobs;
148  const int end = (s->planeheight[p] * (jobnr+1)) / nb_jobs;
149  const int w = s->planewidth[p] - 1;
150  const int h = s->planeheight[p] - 1;
151 
152  for (y = start; y < end; y++) {
153  const int pos = y * s->planewidth[0];
154 
155  for (x = 0; x < s->planewidth[p]; x++) {
156  const int x_pos = s->x_pos[pos + x];
157  const int y_pos = s->y_pos[pos + x];
158  const int ref0 = src_ptr[av_clip(y + y_pos, 0, h) * src_linesize + av_clip(x + x_pos, 0, w)];
159  const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + x_pos, 0, w)];
160  const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
161  const int ref3 = src_ptr[av_clip(y + y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
162  const int src0 = src_ptr[y * src_linesize + x];
163 
164  if (s->blur) {
165  const int avg = get_avg(ref0, ref1, ref2, ref3);
166  const int diff = FFABS(src0 - avg);
167 
168  dst_ptr[y * dst_linesize + x] = diff < thr ? avg : src0;
169  } else {
170  dst_ptr[y * dst_linesize + x] = (FFABS(src0 - ref0) < thr) &&
171  (FFABS(src0 - ref1) < thr) &&
172  (FFABS(src0 - ref2) < thr) &&
173  (FFABS(src0 - ref3) < thr) ? get_avg(ref0, ref1, ref2, ref3) : src0;
174  }
175  }
176  }
177  }
178 
179  return 0;
180 }
181 
/**
 * Slice-threaded 8-bit deband with plane coupling: the averaged
 * replacement is applied only when *every* plane passes its threshold
 * test at the same pixel; otherwise all planes keep their source samples.
 * Iterates on the plane-0 grid — assumes all planes share the same
 * dimensions in coupling mode (NOTE(review): relies on the cpix_fmts
 * list being non-subsampled; confirm against query_formats).
 */
static int deband_8_coupling_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    /* Rows [start, end) handled by this job. */
    const int start = (s->planeheight[0] * jobnr    ) / nb_jobs;
    const int end   = (s->planeheight[0] * (jobnr+1)) / nb_jobs;
    int x, y, p;

    for (y = start; y < end; y++) {
        const int pos = y * s->planewidth[0];

        for (x = 0; x < s->planewidth[0]; x++) {
            /* Per-pixel pseudo-random reference offset. */
            const int x_pos = s->x_pos[pos + x];
            const int y_pos = s->y_pos[pos + x];
            int avg[4], cmp[4] = { 0 }, src[4];

            /* Pass 1: evaluate the threshold test per plane. */
            for (p = 0; p < s->nb_components; p++) {
                const uint8_t *src_ptr = (const uint8_t *)in->data[p];
                const int src_linesize = in->linesize[p];
                const int thr = s->thr[p];
                const int w = s->planewidth[p] - 1;
                const int h = s->planeheight[p] - 1;
                /* Four references mirrored around (x, y), clipped to plane bounds. */
                const int ref0 = src_ptr[av_clip(y + y_pos, 0, h) * src_linesize + av_clip(x + x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y + y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                src[p] = src0;
                avg[p] = get_avg(ref0, ref1, ref2, ref3);

                /* cmp[p]: does this plane lie within threshold of its references? */
                if (s->blur) {
                    cmp[p] = FFABS(src0 - avg[p]) < thr;
                } else {
                    cmp[p] = (FFABS(src0 - ref0) < thr) &&
                             (FFABS(src0 - ref1) < thr) &&
                             (FFABS(src0 - ref2) < thr) &&
                             (FFABS(src0 - ref3) < thr);
                }
            }

            /* Pass 2: couple the decision — all planes pass or none changes. */
            for (p = 0; p < s->nb_components; p++)
                if (!cmp[p])
                    break;
            if (p == s->nb_components) {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p];

                    out->data[p][y * dst_linesize + x] = avg[p];
                }
            } else {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p];

                    out->data[p][y * dst_linesize + x] = src[p];
                }
            }
        }
    }

    return 0;
}
246 
/**
 * Slice-threaded 9-16 bit deband with plane coupling: same algorithm as
 * deband_8_coupling_c but operating on uint16_t samples (linesize is in
 * bytes, hence the /2 when converting it to an element stride).  Uses a
 * separate index `z` for the verdict scan, keeping `p` free for the
 * write loops (behaviorally equivalent to the 8-bit variant's reuse of p).
 */
static int deband_16_coupling_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    /* Rows [start, end) handled by this job. */
    const int start = (s->planeheight[0] * jobnr    ) / nb_jobs;
    const int end   = (s->planeheight[0] * (jobnr+1)) / nb_jobs;
    int x, y, p, z;

    for (y = start; y < end; y++) {
        const int pos = y * s->planewidth[0];

        for (x = 0; x < s->planewidth[0]; x++) {
            /* Per-pixel pseudo-random reference offset. */
            const int x_pos = s->x_pos[pos + x];
            const int y_pos = s->y_pos[pos + x];
            int avg[4], cmp[4] = { 0 }, src[4];

            /* Pass 1: evaluate the threshold test per plane. */
            for (p = 0; p < s->nb_components; p++) {
                const uint16_t *src_ptr = (const uint16_t *)in->data[p];
                const int src_linesize = in->linesize[p] / 2;
                const int thr = s->thr[p];
                const int w = s->planewidth[p] - 1;
                const int h = s->planeheight[p] - 1;
                /* Four references mirrored around (x, y), clipped to plane bounds. */
                const int ref0 = src_ptr[av_clip(y + y_pos, 0, h) * src_linesize + av_clip(x + x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y + y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                src[p] = src0;
                avg[p] = get_avg(ref0, ref1, ref2, ref3);

                /* cmp[p]: does this plane lie within threshold of its references? */
                if (s->blur) {
                    cmp[p] = FFABS(src0 - avg[p]) < thr;
                } else {
                    cmp[p] = (FFABS(src0 - ref0) < thr) &&
                             (FFABS(src0 - ref1) < thr) &&
                             (FFABS(src0 - ref2) < thr) &&
                             (FFABS(src0 - ref3) < thr);
                }
            }

            /* Pass 2: couple the decision — all planes pass or none changes. */
            for (z = 0; z < s->nb_components; z++)
                if (!cmp[z])
                    break;
            if (z == s->nb_components) {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p] / 2;
                    uint16_t *dst = (uint16_t *)out->data[p] + y * dst_linesize + x;

                    dst[0] = avg[p];
                }
            } else {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p] / 2;
                    uint16_t *dst = (uint16_t *)out->data[p] + y * dst_linesize + x;

                    dst[0] = src[p];
                }
            }
        }
    }

    return 0;
}
313 
314 static int deband_16_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
315 {
316  DebandContext *s = ctx->priv;
317  ThreadData *td = arg;
318  AVFrame *in = td->in;
319  AVFrame *out = td->out;
320  int x, y, p;
321 
322  for (p = 0; p < s->nb_components; p++) {
323  const uint16_t *src_ptr = (const uint16_t *)in->data[p];
324  uint16_t *dst_ptr = (uint16_t *)out->data[p];
325  const int dst_linesize = out->linesize[p] / 2;
326  const int src_linesize = in->linesize[p] / 2;
327  const int thr = s->thr[p];
328  const int start = (s->planeheight[p] * jobnr ) / nb_jobs;
329  const int end = (s->planeheight[p] * (jobnr+1)) / nb_jobs;
330  const int w = s->planewidth[p] - 1;
331  const int h = s->planeheight[p] - 1;
332 
333  for (y = start; y < end; y++) {
334  const int pos = y * s->planewidth[0];
335 
336  for (x = 0; x < s->planewidth[p]; x++) {
337  const int x_pos = s->x_pos[pos + x];
338  const int y_pos = s->y_pos[pos + x];
339  const int ref0 = src_ptr[av_clip(y + y_pos, 0, h) * src_linesize + av_clip(x + x_pos, 0, w)];
340  const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + x_pos, 0, w)];
341  const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
342  const int ref3 = src_ptr[av_clip(y + y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
343  const int src0 = src_ptr[y * src_linesize + x];
344 
345  if (s->blur) {
346  const int avg = get_avg(ref0, ref1, ref2, ref3);
347  const int diff = FFABS(src0 - avg);
348 
349  dst_ptr[y * dst_linesize + x] = diff < thr ? avg : src0;
350  } else {
351  dst_ptr[y * dst_linesize + x] = (FFABS(src0 - ref0) < thr) &&
352  (FFABS(src0 - ref1) < thr) &&
353  (FFABS(src0 - ref2) < thr) &&
354  (FFABS(src0 - ref3) < thr) ? get_avg(ref0, ref1, ref2, ref3) : src0;
355  }
356  }
357  }
358  }
359 
360  return 0;
361 }
362 
364 {
366  AVFilterContext *ctx = inlink->dst;
367  DebandContext *s = ctx->priv;
368  const float direction = s->direction;
369  const int range = s->range;
370  int x, y;
371 
372  s->nb_components = desc->nb_components;
373 
374  s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
375  s->planeheight[0] = s->planeheight[3] = inlink->h;
376  s->planewidth[1] = s->planewidth[2] = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w);
377  s->planewidth[0] = s->planewidth[3] = inlink->w;
378  s->shift[0] = desc->log2_chroma_w;
379  s->shift[1] = desc->log2_chroma_h;
380 
381  if (s->coupling)
382  s->deband = desc->comp[0].depth > 8 ? deband_16_coupling_c : deband_8_coupling_c;
383  else
384  s->deband = desc->comp[0].depth > 8 ? deband_16_c : deband_8_c;
385 
386  s->thr[0] = ((1 << desc->comp[0].depth) - 1) * s->threshold[0];
387  s->thr[1] = ((1 << desc->comp[1].depth) - 1) * s->threshold[1];
388  s->thr[2] = ((1 << desc->comp[2].depth) - 1) * s->threshold[2];
389  s->thr[3] = ((1 << desc->comp[3].depth) - 1) * s->threshold[3];
390 
391  s->x_pos = av_malloc(s->planewidth[0] * s->planeheight[0] * sizeof(*s->x_pos));
392  s->y_pos = av_malloc(s->planewidth[0] * s->planeheight[0] * sizeof(*s->y_pos));
393  if (!s->x_pos || !s->y_pos)
394  return AVERROR(ENOMEM);
395 
396  for (y = 0; y < s->planeheight[0]; y++) {
397  for (x = 0; x < s->planewidth[0]; x++) {
398  const float r = frand(x, y);
399  const float dir = direction < 0 ? -direction : r * direction;
400  const int dist = range < 0 ? -range : r * range;
401 
402  s->x_pos[y * s->planewidth[0] + x] = cosf(dir) * dist;
403  s->y_pos[y * s->planewidth[0] + x] = sinf(dir) * dist;
404  }
405  }
406 
407  return 0;
408 }
409 
411 {
412  AVFilterContext *ctx = inlink->dst;
413  AVFilterLink *outlink = ctx->outputs[0];
414  DebandContext *s = ctx->priv;
415  AVFrame *out;
416  ThreadData td;
417 
418  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
419  if (!out) {
420  av_frame_free(&in);
421  return AVERROR(ENOMEM);
422  }
424 
425  td.in = in; td.out = out;
426  ctx->internal->execute(ctx, s->deband, &td, NULL, FFMIN3(s->planeheight[1],
427  s->planeheight[2],
429 
430  av_frame_free(&in);
431  return ff_filter_frame(outlink, out);
432 }
433 
435 {
436  DebandContext *s = ctx->priv;
437 
438  av_freep(&s->x_pos);
439  av_freep(&s->y_pos);
440 }
441 
443  {
444  .name = "default",
445  .type = AVMEDIA_TYPE_VIDEO,
446  .config_props = config_input,
447  .filter_frame = filter_frame,
448  },
449  { NULL }
450 };
451 
453  {
454  .name = "default",
455  .type = AVMEDIA_TYPE_VIDEO,
456  },
457  { NULL }
458 };
459 
461  .name = "deband",
462  .description = NULL_IF_CONFIG_SMALL("Debands video."),
463  .priv_size = sizeof(DebandContext),
464  .priv_class = &deband_class,
465  .uninit = uninit,
470 };
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
AV_PIX_FMT_YUVA422P16
#define AV_PIX_FMT_YUVA422P16
Definition: pixfmt.h:430
AV_PIX_FMT_GBRAP16
#define AV_PIX_FMT_GBRAP16
Definition: pixfmt.h:409
td
#define td
Definition: regdef.h:70
deband_16_coupling_c
static int deband_16_coupling_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_deband.c:247
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
r
const char * r
Definition: vf_curves.c:114
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
DebandContext::threshold
float threshold[4]
Definition: vf_deband.c:34
opt.h
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:283
DebandContext::range
int range
Definition: vf_deband.c:35
out
FILE * out
Definition: movenc.c:54
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1080
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2522
deband_8_coupling_c
static int deband_8_coupling_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_deband.c:182
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:202
end
static av_cold int end(AVCodecContext *avctx)
Definition: avrndec.c:90
AV_PIX_FMT_YUVA422P9
#define AV_PIX_FMT_YUVA422P9
Definition: pixfmt.h:422
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:295
pixdesc.h
AV_PIX_FMT_YUVA420P16
#define AV_PIX_FMT_YUVA420P16
Definition: pixfmt.h:429
w
uint8_t w
Definition: llviddspenc.c:38
frand
static float frand(int x, int y)
Definition: vf_deband.c:117
AV_PIX_FMT_YUVA420P10
#define AV_PIX_FMT_YUVA420P10
Definition: pixfmt.h:424
AVOption
AVOption.
Definition: opt.h:246
config_input
static int config_input(AVFilterLink *inlink)
Definition: vf_deband.c:363
filter_frame
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: vf_deband.c:410
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:99
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:148
ThreadData::out
AVFrame * out
Definition: af_adeclick.c:488
video.h
AVFormatContext::internal
AVFormatInternal * internal
An opaque field for libavformat internal usage.
Definition: avformat.h:1795
AV_PIX_FMT_YUVA422P10
#define AV_PIX_FMT_YUVA422P10
Definition: pixfmt.h:425
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:31
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
DebandContext::y_pos
int * y_pos
Definition: vf_deband.c:46
AV_PIX_FMT_YUVA420P9
#define AV_PIX_FMT_YUVA420P9
Definition: pixfmt.h:421
deband_16_c
static int deband_16_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_deband.c:314
AV_PIX_FMT_GBRP14
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:405
AV_PIX_FMT_GBRAP
@ AV_PIX_FMT_GBRAP
planar GBRA 4:4:4:4 32bpp
Definition: pixfmt.h:215
cosf
#define cosf(x)
Definition: libm.h:78
start
void INT64 start
Definition: avisynth_c.h:767
AV_PIX_FMT_GBRP10
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:403
AV_PIX_FMT_YUVA444P16
#define AV_PIX_FMT_YUVA444P16
Definition: pixfmt.h:431
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:385
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:371
FFMIN3
#define FFMIN3(a, b, c)
Definition: common.h:97
DebandContext::deband
int(* deband)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_deband.c:48
src
#define src
Definition: vp8dsp.c:254
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
AV_PIX_FMT_YUVJ411P
@ AV_PIX_FMT_YUVJ411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:258
DebandContext::shift
int shift[2]
Definition: vf_deband.c:42
av_cold
#define av_cold
Definition: attributes.h:84
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:399
ff_set_common_formats
int ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:568
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:79
s
#define s(width, name)
Definition: cbs_vp9.c:257
AV_PIX_FMT_YUVA420P
@ AV_PIX_FMT_YUVA420P
planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
Definition: pixfmt.h:101
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:400
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:58
FLAGS
#define FLAGS
Definition: vf_deband.c:52
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:275
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:384
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:398
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
cmp
static av_always_inline int cmp(MpegEncContext *s, const int x, const int y, const int subx, const int suby, const int size, const int h, int ref_index, int src_index, me_cmp_func cmp_func, me_cmp_func chroma_cmp_func, const int flags)
compares a block (either a full macroblock or a partition thereof) against a proposed motion-compensa...
Definition: motion_est.c:260
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:80
arg
const char * arg
Definition: jacosubdec.c:66
FFABS
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
Definition: common.h:72
AV_PIX_FMT_GBRP16
#define AV_PIX_FMT_GBRP16
Definition: pixfmt.h:406
DebandContext::planeheight
int planeheight[4]
Definition: vf_deband.c:41
ff_vf_deband
AVFilter ff_vf_deband
Definition: vf_deband.c:460
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
NULL
#define NULL
Definition: coverity.c:32
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:654
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:78
DebandContext::coupling
int coupling
Definition: vf_deband.c:33
DebandContext::planewidth
int planewidth[4]
Definition: vf_deband.c:40
DebandContext
Definition: vf_deband.c:30
sinf
#define sinf(x)
Definition: libm.h:419
inputs
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
Definition: filter_design.txt:243
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
AV_PIX_FMT_GBRP9
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:402
DebandContext::nb_components
int nb_components
Definition: vf_deband.c:39
get_avg
static int get_avg(int ref0, int ref1, int ref2, int ref3)
Definition: vf_deband.c:124
desc
const char * desc
Definition: nvenc.c:68
blur
static void blur(uint8_t *dst, int dst_step, const uint8_t *src, int src_step, int len, int radius, int pixsize)
Definition: vf_boxblur.c:160
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:188
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:392
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:394
avg
#define avg(a, b, c, d)
Definition: colorspacedsp_template.c:28
avfilter_vf_deband_inputs
static const AVFilterPad avfilter_vf_deband_inputs[]
Definition: vf_deband.c:442
AV_PIX_FMT_YUVA444P
@ AV_PIX_FMT_YUVA444P
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
Definition: pixfmt.h:177
AV_PIX_FMT_YUVA444P10
#define AV_PIX_FMT_YUVA444P10
Definition: pixfmt.h:426
M_PI
#define M_PI
Definition: mathematics.h:52
src0
#define src0
Definition: h264pred.c:138
internal.h
AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
#define AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
Some filters support a generic "enable" expression option that can be used to enable or disable a fil...
Definition: avfilter.h:125
AV_OPT_TYPE_FLOAT
@ AV_OPT_TYPE_FLOAT
Definition: opt.h:226
DebandContext::thr
int thr[4]
Definition: vf_deband.c:43
in
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(const int16_t *) pi >> 8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(const int32_t *) pi >> 24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31)))) #define SET_CONV_FUNC_GROUP(ofmt, ifmt) static void set_generic_function(AudioConvert *ac) { } void ff_audio_convert_free(AudioConvert **ac) { if(! 
*ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);} AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, int sample_rate, int apply_map) { AudioConvert *ac;int in_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) return NULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method !=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt) > 2) { ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc) { av_free(ac);return NULL;} return ac;} in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar) { ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar ? ac->channels :1;} else if(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;else ac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);return ac;} int ff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in) { int use_generic=1;int len=in->nb_samples;int p;if(ac->dc) { av_log(ac->avr, AV_LOG_TRACE, "%d samples - audio_convert: %s to %s (dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));return ff_convert_dither(ac-> in
Definition: audio_convert.c:326
AV_PIX_FMT_GBRP12
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:404
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:802
ThreadData
Used for passing data between threads.
Definition: af_adeclick.c:487
DebandContext::x_pos
int * x_pos
Definition: vf_deband.c:45
AV_PIX_FMT_YUVJ440P
@ AV_PIX_FMT_YUVJ440P
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
Definition: pixfmt.h:100
uint8_t
uint8_t
Definition: audio_convert.c:194
deband_options
static const AVOption deband_options[]
Definition: vf_deband.c:54
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:60
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:386
DebandContext::blur
int blur
Definition: vf_deband.c:36
avfilter_vf_deband_outputs
static const AVFilterPad avfilter_vf_deband_outputs[]
Definition: vf_deband.c:452
AVFilter
Filter definition.
Definition: avfilter.h:144
OFFSET
#define OFFSET(x)
Definition: vf_deband.c:51
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_deband.c:72
DebandContext::direction
float direction
Definition: vf_deband.c:37
AV_PIX_FMT_YUVA444P9
#define AV_PIX_FMT_YUVA444P9
Definition: pixfmt.h:423
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:391
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:396
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:223
avfilter.h
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(deband)
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
AVFilterContext
An instance of a filter.
Definition: avfilter.h:338
deband_8_c
static int deband_8_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_deband.c:133
AV_PIX_FMT_GBRP
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:168
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:116
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
ThreadData::in
AVFrame * in
Definition: af_afftdn.c:1082
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:81
diff
static av_always_inline int diff(const uint32_t a, const uint32_t b)
Definition: vf_palettegen.c:136
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:240
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:73
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:565
AV_PIX_FMT_YUV410P
@ AV_PIX_FMT_YUV410P
planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
Definition: pixfmt.h:72
h
h
Definition: vp9dsp_template.c:2038
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:397
int
int
Definition: ffmpeg_filter.c:191
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_deband.c:434
AV_PIX_FMT_YUVA422P
@ AV_PIX_FMT_YUVA422P
planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
Definition: pixfmt.h:176
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:395