vf_bwdif.c
/*
 * BobWeaver Deinterlacing Filter
 * Copyright (C) 2016 Thomas Mundt <loudmax@yahoo.de>
 *
 * Based on YADIF (Yet Another Deinterlacing Filter)
 * Copyright (C) 2006-2011 Michael Niedermayer <michaelni@gmx.at>
 *               2010      James Darnley <james.darnley@gmail.com>
 *
 * With use of Weston 3 Field Deinterlacing Filter algorithm
 * Copyright (C) 2012 British Broadcasting Corporation, All Rights Reserved
 * Author of de-interlace algorithm: Jim Easterbrook for BBC R&D
 * Based on the process described by Martin Weston for BBC R&D
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/common.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "bwdifdsp.h"
#include "ccfifo.h"
#include "internal.h"
#include "yadif.h"

typedef struct BWDIFContext {
    YADIFContext yadif;
    BWDIFDSPContext dsp;
} BWDIFContext;

typedef struct ThreadData {
    AVFrame *frame;
    int plane;
    int w, h;
    int parity;
    int tff;
} ThreadData;

// Round job start line down to multiple of 4 so that if filter_line3 exists
// and the frame is a multiple of 4 high then filter_line will never be called
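// e.g. with h = 100 and nb_jobs = 3 the job boundaries become 0, 32, 64, 100:
// interior starts are aligned down to a multiple of 4, and jobnr >= nb_jobs
// yields h so the last job always ends at the bottom of the frame.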
static inline int job_start(const int jobnr, const int nb_jobs, const int h)
{
    return jobnr >= nb_jobs ? h : ((h * jobnr) / nb_jobs) & ~3;
}

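/*
 * Filter one horizontal slice of one plane.  Lines belonging to the field
 * being kept are copied straight from the current frame; lines of the field
 * being rebuilt go through filter_intra at the first/last field of a
 * sequence, filter_edge near the top and bottom of the plane, and
 * filter_line3 (three consecutive output lines per call, when available)
 * or filter_line elsewhere.
 */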
static int filter_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    BWDIFContext *s = ctx->priv;
    YADIFContext *yadif = &s->yadif;
    ThreadData *td = arg;
    int linesize = yadif->cur->linesize[td->plane];
    int clip_max = (1 << (yadif->csp->comp[td->plane].depth)) - 1;
    int df = (yadif->csp->comp[td->plane].depth + 7) / 8;
    int refs = linesize / df;
    int slice_start = job_start(jobnr, nb_jobs, td->h);
    int slice_end   = job_start(jobnr + 1, nb_jobs, td->h);
    int y;

    for (y = slice_start; y < slice_end; y++) {
        if ((y ^ td->parity) & 1) {
            uint8_t *prev = &yadif->prev->data[td->plane][y * linesize];
            uint8_t *cur  = &yadif->cur ->data[td->plane][y * linesize];
            uint8_t *next = &yadif->next->data[td->plane][y * linesize];
            uint8_t *dst  = &td->frame->data[td->plane][y * td->frame->linesize[td->plane]];
            if (yadif->current_field == YADIF_FIELD_END) {
                s->dsp.filter_intra(dst, cur, td->w, (y + df) < td->h ? refs : -refs,
                                    y > (df - 1) ? -refs : refs,
                                    (y + 3*df) < td->h ? 3 * refs : -refs,
                                    y > (3*df - 1) ? -3 * refs : refs,
                                    td->parity ^ td->tff, clip_max);
            } else if ((y < 4) || ((y + 5) > td->h)) {
                s->dsp.filter_edge(dst, prev, cur, next, td->w,
                                   (y + df) < td->h ? refs : -refs,
                                   y > (df - 1) ? -refs : refs,
                                   refs << 1, -(refs << 1),
                                   td->parity ^ td->tff, clip_max,
                                   (y < 2) || ((y + 3) > td->h) ? 0 : 1);
            } else if (s->dsp.filter_line3 && y + 2 < slice_end && y + 6 < td->h) {
                s->dsp.filter_line3(dst, td->frame->linesize[td->plane],
                                    prev, cur, next, linesize, td->w,
                                    td->parity ^ td->tff, clip_max);
                y += 2;
            } else {
                s->dsp.filter_line(dst, prev, cur, next, td->w,
                                   refs, -refs, refs << 1, -(refs << 1),
                                   3 * refs, -3 * refs, refs << 2, -(refs << 2),
                                   td->parity ^ td->tff, clip_max);
            }
        } else {
            memcpy(&td->frame->data[td->plane][y * td->frame->linesize[td->plane]],
                   &yadif->cur->data[td->plane][y * linesize], td->w * df);
        }
    }
    return 0;
}

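/*
 * Deinterlace one output picture: for every plane, hand ThreadData to
 * filter_slice() via ff_filter_execute(), using at most one job per line
 * and no more jobs than the filter's thread count.
 */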
static void filter(AVFilterContext *ctx, AVFrame *dstpic,
                   int parity, int tff)
{
    BWDIFContext *bwdif = ctx->priv;
    YADIFContext *yadif = &bwdif->yadif;
    ThreadData td = { .frame = dstpic, .parity = parity, .tff = tff };
    int i;

    for (i = 0; i < yadif->csp->nb_components; i++) {
        int w = dstpic->width;
        int h = dstpic->height;

        if (i == 1 || i == 2) {
            w = AV_CEIL_RSHIFT(w, yadif->csp->log2_chroma_w);
            h = AV_CEIL_RSHIFT(h, yadif->csp->log2_chroma_h);
        }

        td.w     = w;
        td.h     = h;
        td.plane = i;

        ff_filter_execute(ctx, filter_slice, &td, NULL,
                          FFMIN(h, ff_filter_get_nb_threads(ctx)));
    }
    if (yadif->current_field == YADIF_FIELD_END) {
        yadif->current_field = YADIF_FIELD_NORMAL;
    }
}

static const enum AVPixelFormat pix_fmts[] = {
    AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_YUVJ411P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P,
    AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_YUVJ444P,
    AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
    AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12,
    AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14,
    AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
    AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
    AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9,
    AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
    AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
    AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
    AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
    AV_PIX_FMT_GBRAP, AV_PIX_FMT_GBRAP16,
    AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY16,
    AV_PIX_FMT_NONE
};

static int config_props(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    BWDIFContext *s = link->src->priv;
    YADIFContext *yadif = &s->yadif;
    int ret;

    ret = ff_yadif_config_output_common(link);
    if (ret < 0)
        return AVERROR(EINVAL);

    yadif->csp = av_pix_fmt_desc_get(link->format);
    yadif->filter = filter;

    if (AV_CEIL_RSHIFT(link->w, yadif->csp->log2_chroma_w) < 3 || AV_CEIL_RSHIFT(link->h, yadif->csp->log2_chroma_h) < 4) {
        av_log(ctx, AV_LOG_ERROR, "Video with planes less than 3 columns or 4 lines is not supported\n");
        return AVERROR(EINVAL);
    }

    ff_bwdif_init_filter_line(&s->dsp, yadif->csp->comp[0].depth);

    return 0;
}


#define OFFSET(x) offsetof(YADIFContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

#define CONST(name, help, val, u) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, INT_MIN, INT_MAX, FLAGS, .unit = u }

static const AVOption bwdif_options[] = {
    { "mode",   "specify the interlacing mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=YADIF_MODE_SEND_FIELD}, 0, 1, FLAGS, .unit = "mode"},
    CONST("send_frame", "send one frame for each frame", YADIF_MODE_SEND_FRAME, "mode"),
    CONST("send_field", "send one frame for each field", YADIF_MODE_SEND_FIELD, "mode"),

    { "parity", "specify the assumed picture field parity", OFFSET(parity), AV_OPT_TYPE_INT, {.i64=YADIF_PARITY_AUTO}, -1, 1, FLAGS, .unit = "parity" },
    CONST("tff",  "assume top field first",    YADIF_PARITY_TFF,  "parity"),
    CONST("bff",  "assume bottom field first", YADIF_PARITY_BFF,  "parity"),
    CONST("auto", "auto detect parity",        YADIF_PARITY_AUTO, "parity"),

    { "deint", "specify which frames to deinterlace", OFFSET(deint), AV_OPT_TYPE_INT, {.i64=YADIF_DEINT_ALL}, 0, 1, FLAGS, .unit = "deint" },
    CONST("all",        "deinterlace all frames",                       YADIF_DEINT_ALL,        "deint"),
    CONST("interlaced", "only deinterlace frames marked as interlaced", YADIF_DEINT_INTERLACED, "deint"),

    { NULL }
};
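/*
 * Example command line (assuming a standard ffmpeg CLI build):
 *
 *     ffmpeg -i interlaced.ts -vf bwdif=mode=send_field:parity=auto:deint=all out.mkv
 *
 * mode=send_field emits one progressive frame per input field, doubling the
 * output frame rate.
 */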

AVFILTER_DEFINE_CLASS(bwdif);

static const AVFilterPad avfilter_vf_bwdif_inputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .filter_frame  = ff_yadif_filter_frame,
    },
};

static const AVFilterPad avfilter_vf_bwdif_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = ff_yadif_request_frame,
        .config_props  = config_props,
    },
};

const AVFilter ff_vf_bwdif = {
    .name          = "bwdif",
    .description   = NULL_IF_CONFIG_SMALL("Deinterlace the input image."),
    .priv_size     = sizeof(BWDIFContext),
    .priv_class    = &bwdif_class,
    .uninit        = ff_yadif_uninit,
    FILTER_INPUTS(avfilter_vf_bwdif_inputs),
    FILTER_OUTPUTS(avfilter_vf_bwdif_outputs),
    FILTER_PIXFMTS_ARRAY(pix_fmts),
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL | AVFILTER_FLAG_SLICE_THREADS,
};
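
/*
 * Minimal programmatic usage sketch (filter graph setup and error handling
 * omitted; "graph" and "deint_ctx" are illustrative names):
 *
 *     const AVFilter *f = avfilter_get_by_name("bwdif");
 *     AVFilterContext *deint_ctx;
 *     avfilter_graph_create_filter(&deint_ctx, f, "deint",
 *                                  "mode=send_field:parity=auto", NULL, graph);
 */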