FFmpeg
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
vf_w3fdif.c
Go to the documentation of this file.
1 /*
2  * Copyright (C) 2012 British Broadcasting Corporation, All Rights Reserved
3  * Author of de-interlace algorithm: Jim Easterbrook for BBC R&D
4  * Based on the process described by Martin Weston for BBC R&D
5  * Author of FFmpeg filter: Mark Himsley for BBC Broadcast Systems Development
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 #include "libavutil/common.h"
25 #include "libavutil/imgutils.h"
26 #include "libavutil/opt.h"
27 #include "libavutil/pixdesc.h"
28 #include "avfilter.h"
29 #include "formats.h"
30 #include "internal.h"
31 #include "video.h"
32 
33 typedef struct W3FDIFContext {
34  const AVClass *class;
35  int filter; ///< 0 is simple, 1 is more complex
36  int deint; ///< which frames to deinterlace
37  int linesize[4]; ///< bytes of pixel data per line for each plane
38  int planeheight[4]; ///< height of each plane
39  int field; ///< which field are we on, 0 or 1
40  int eof;
41  int nb_planes;
42  AVFrame *prev, *cur, *next; ///< previous, current, next frames
43  int32_t **work_line; ///< lines we are calculating
46 
#define OFFSET(x) offsetof(W3FDIFContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
/* shorthand for a named constant belonging to an option "unit" */
#define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, 0, 0, FLAGS, unit }

/* user-settable options: filter complexity and which frames to process */
static const AVOption w3fdif_options[] = {
    { "filter", "specify the filter", OFFSET(filter), AV_OPT_TYPE_INT, {.i64=1}, 0, 1, FLAGS, "filter" },
    CONST("simple",  NULL, 0, "filter"),   /* 2-tap LF / 3-tap HF coefficients */
    CONST("complex", NULL, 1, "filter"),   /* 4-tap LF / 5-tap HF coefficients (default) */
    { "deint", "specify which frames to deinterlace", OFFSET(deint), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, FLAGS, "deint" },
    CONST("all",        "deinterlace all frames",                       0, "deint"),
    CONST("interlaced", "only deinterlace frames marked as interlaced", 1, "deint"),
    { NULL }
};
60 
61 AVFILTER_DEFINE_CLASS(w3fdif);
62 
64 {
65  static const enum AVPixelFormat pix_fmts[] = {
76  };
77 
78  AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
79  if (!fmts_list)
80  return AVERROR(ENOMEM);
81  return ff_set_common_formats(ctx, fmts_list);
82 }
83 
/**
 * Configure per-plane geometry and allocate one 32-bit work line per thread.
 * Returns 0 on success or a negative AVERROR code.
 */
static int config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    W3FDIFContext *s = ctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    int ret, i;

    /* bytes of pixel data per line for each plane, from format + width */
    if ((ret = av_image_fill_linesizes(s->linesize, inlink->format, inlink->w)) < 0)
        return ret;

    /* chroma planes may be vertically subsampled; luma/alpha are full height */
    s->planeheight[1] = s->planeheight[2] = FF_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
    s->planeheight[0] = s->planeheight[3] = inlink->h;

    /* NOTE(review): s->nb_planes is read by filter() but never assigned in the
     * visible code — presumably a line such as
     * s->nb_planes = av_pix_fmt_count_planes(inlink->format); was lost; verify. */
    s->nb_threads = ctx->graph->nb_threads;
    s->work_line = av_calloc(s->nb_threads, sizeof(*s->work_line));
    if (!s->work_line)
        return AVERROR(ENOMEM);

    /* one scratch accumulator line per slice-thread, widest plane sized */
    for (i = 0; i < s->nb_threads; i++) {
        s->work_line[i] = av_calloc(s->linesize[0], sizeof(*s->work_line[0]));
        if (!s->work_line[i])
            return AVERROR(ENOMEM);
    }

    return 0;
}
111 
112 static int config_output(AVFilterLink *outlink)
113 {
114  AVFilterLink *inlink = outlink->src->inputs[0];
115 
116  outlink->time_base.num = inlink->time_base.num;
117  outlink->time_base.den = inlink->time_base.den * 2;
118  outlink->frame_rate.num = inlink->frame_rate.num * 2;
119  outlink->frame_rate.den = inlink->frame_rate.den;
120  outlink->flags |= FF_LINK_FLAG_REQUEST_LOOP;
121 
122  return 0;
123 }
124 
/*
 * Filter coefficients from PH-2071, scaled by 256 * 256.
 * Each set of coefficients has a set for low-frequencies and high-frequencies.
 * n_coef_lf[] and n_coef_hf[] are the number of coefs for simple and more-complex.
 * It is important for later that n_coef_lf[] is even and n_coef_hf[] is odd.
 * coef_lf[][] and coef_hf[][] are the coefficients for low-frequencies
 * and high-frequencies for simple and more-complex mode.
 */
/* tap counts: index 0 = simple mode, index 1 = complex mode */
static const int8_t n_coef_lf[2] = { 2, 4 };
/* low-frequency (intra-field) taps; each row sums to 65536 (unity gain) */
static const int32_t coef_lf[2][4] = {{ 32768, 32768,     0,     0},
                                      { -1704, 34472, 34472, -1704}};
static const int8_t n_coef_hf[2] = { 3, 5 };
/* high-frequency (inter-field correction) taps; each row sums to 0 */
static const int32_t coef_hf[2][5] = {{ -4096,  8192, -4096,     0,    0},
                                      {  2032, -7602, 11140, -7602, 2032}};
139 
140 typedef struct ThreadData {
142  int plane;
143 } ThreadData;
144 
/**
 * Slice worker: deinterlace one horizontal band of one plane.
 *
 * Lines belonging to the current field are copied through unchanged; the
 * missing lines are synthesized as low-frequency taps from the current field
 * plus high-frequency correction taps from both the current and the adjacent
 * field, accumulated in 32-bit fixed point (scale 256*256, see coef tables).
 *
 * @param arg     ThreadData with out/cur/adj frames and the plane index
 * @param jobnr   index of this slice (0 .. nb_jobs-1)
 * @param nb_jobs total number of slices; also selects s->work_line[jobnr]
 */
static int deinterlace_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    W3FDIFContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *out = td->out;
    AVFrame *cur = td->cur;
    AVFrame *adj = td->adj;
    const int plane = td->plane;
    const int filter = s->filter;
    uint8_t *in_line, *in_lines_cur[5], *in_lines_adj[5];
    uint8_t *out_line, *out_pixel;
    int32_t *work_line, *work_pixel;
    uint8_t *cur_data = cur->data[plane];
    uint8_t *adj_data = adj->data[plane];
    uint8_t *dst_data = out->data[plane];
    const int linesize = s->linesize[plane];
    const int height = s->planeheight[plane];
    const int cur_line_stride = cur->linesize[plane];
    const int adj_line_stride = adj->linesize[plane];
    const int dst_line_stride = out->linesize[plane];
    const int start = (height * jobnr) / nb_jobs;      /* first line of this slice */
    const int end = (height * (jobnr+1)) / nb_jobs;    /* one past the last line */
    int i, j, y_in, y_out;

    /* copy unchanged the lines of the field */
    /* align start to the parity of the field being kept */
    y_out = start + (s->field == cur->top_field_first) - (start & 1);

    in_line  = cur_data + (y_out * cur_line_stride);
    out_line = dst_data + (y_out * dst_line_stride);

    while (y_out < end) {
        memcpy(out_line, in_line, linesize);
        y_out += 2;
        in_line  += cur_line_stride * 2;
        out_line += dst_line_stride * 2;
    }

    /* interpolate other lines of the field */
    y_out = start + (s->field != cur->top_field_first) - (start & 1);

    out_line = dst_data + (y_out * dst_line_stride);

    while (y_out < end) {
        /* clear workspace */
        memset(s->work_line[jobnr], 0, sizeof(*s->work_line[jobnr]) * linesize);

        /* get low vertical frequencies from current field */
        for (j = 0; j < n_coef_lf[filter]; j++) {
            y_in = (y_out + 1) + (j * 2) - n_coef_lf[filter];

            /* mirror taps that fall outside the plane back in by field steps */
            while (y_in < 0)
                y_in += 2;
            while (y_in >= height)
                y_in -= 2;

            in_lines_cur[j] = cur_data + (y_in * cur_line_stride);
        }

        work_line = s->work_line[jobnr];
        switch (n_coef_lf[filter]) {
        case 2:
            for (i = 0; i < linesize; i++) {
                *work_line   += *in_lines_cur[0]++ * coef_lf[filter][0];
                *work_line++ += *in_lines_cur[1]++ * coef_lf[filter][1];
            }
            break;
        case 4:
            for (i = 0; i < linesize; i++) {
                *work_line   += *in_lines_cur[0]++ * coef_lf[filter][0];
                *work_line   += *in_lines_cur[1]++ * coef_lf[filter][1];
                *work_line   += *in_lines_cur[2]++ * coef_lf[filter][2];
                *work_line++ += *in_lines_cur[3]++ * coef_lf[filter][3];
            }
        }

        /* get high vertical frequencies from adjacent fields */
        for (j = 0; j < n_coef_hf[filter]; j++) {
            y_in = (y_out + 1) + (j * 2) - n_coef_hf[filter];

            while (y_in < 0)
                y_in += 2;
            while (y_in >= height)
                y_in -= 2;

            in_lines_cur[j] = cur_data + (y_in * cur_line_stride);
            in_lines_adj[j] = adj_data + (y_in * adj_line_stride);
        }

        work_line = s->work_line[jobnr];
        switch (n_coef_hf[filter]) {
        case 3:
            for (i = 0; i < linesize; i++) {
                *work_line   += *in_lines_cur[0]++ * coef_hf[filter][0];
                *work_line   += *in_lines_adj[0]++ * coef_hf[filter][0];
                *work_line   += *in_lines_cur[1]++ * coef_hf[filter][1];
                *work_line   += *in_lines_adj[1]++ * coef_hf[filter][1];
                *work_line   += *in_lines_cur[2]++ * coef_hf[filter][2];
                *work_line++ += *in_lines_adj[2]++ * coef_hf[filter][2];
            }
            break;
        case 5:
            for (i = 0; i < linesize; i++) {
                *work_line   += *in_lines_cur[0]++ * coef_hf[filter][0];
                *work_line   += *in_lines_adj[0]++ * coef_hf[filter][0];
                *work_line   += *in_lines_cur[1]++ * coef_hf[filter][1];
                *work_line   += *in_lines_adj[1]++ * coef_hf[filter][1];
                *work_line   += *in_lines_cur[2]++ * coef_hf[filter][2];
                *work_line   += *in_lines_adj[2]++ * coef_hf[filter][2];
                *work_line   += *in_lines_cur[3]++ * coef_hf[filter][3];
                *work_line   += *in_lines_adj[3]++ * coef_hf[filter][3];
                *work_line   += *in_lines_cur[4]++ * coef_hf[filter][4];
                *work_line++ += *in_lines_adj[4]++ * coef_hf[filter][4];
            }
        }

        /* save scaled result to the output frame, scaling down by 256 * 256 */
        work_pixel = s->work_line[jobnr];
        out_pixel = out_line;

        for (j = 0; j < linesize; j++, out_pixel++, work_pixel++)
            *out_pixel = av_clip(*work_pixel, 0, 255 * 256 * 256) >> 16;

        /* move on to next line */
        y_out += 2;
        out_line += dst_line_stride * 2;
    }

    return 0;
}
274 
275 static int filter(AVFilterContext *ctx, int is_second)
276 {
277  W3FDIFContext *s = ctx->priv;
278  AVFilterLink *outlink = ctx->outputs[0];
279  AVFrame *out, *adj;
280  ThreadData td;
281  int plane;
282 
283  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
284  if (!out)
285  return AVERROR(ENOMEM);
286  av_frame_copy_props(out, s->cur);
287  out->interlaced_frame = 0;
288 
289  if (!is_second) {
290  if (out->pts != AV_NOPTS_VALUE)
291  out->pts *= 2;
292  } else {
293  int64_t cur_pts = s->cur->pts;
294  int64_t next_pts = s->next->pts;
295 
296  if (next_pts != AV_NOPTS_VALUE && cur_pts != AV_NOPTS_VALUE) {
297  out->pts = cur_pts + next_pts;
298  } else {
299  out->pts = AV_NOPTS_VALUE;
300  }
301  }
302 
303  adj = s->field ? s->next : s->prev;
304  td.out = out; td.cur = s->cur; td.adj = adj;
305  for (plane = 0; plane < s->nb_planes; plane++) {
306  td.plane = plane;
307  ctx->internal->execute(ctx, deinterlace_slice, &td, NULL, FFMIN(s->planeheight[plane], s->nb_threads));
308  }
309 
310  s->field = !s->field;
311 
312  return ff_filter_frame(outlink, out);
313 }
314 
315 static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
316 {
317  AVFilterContext *ctx = inlink->dst;
318  W3FDIFContext *s = ctx->priv;
319  int ret;
320 
321  av_frame_free(&s->prev);
322  s->prev = s->cur;
323  s->cur = s->next;
324  s->next = frame;
325 
326  if (!s->cur) {
327  s->cur = av_frame_clone(s->next);
328  if (!s->cur)
329  return AVERROR(ENOMEM);
330  }
331 
332  if ((s->deint && !s->cur->interlaced_frame) || ctx->is_disabled) {
333  AVFrame *out = av_frame_clone(s->cur);
334  if (!out)
335  return AVERROR(ENOMEM);
336 
337  av_frame_free(&s->prev);
338  if (out->pts != AV_NOPTS_VALUE)
339  out->pts *= 2;
340  return ff_filter_frame(ctx->outputs[0], out);
341  }
342 
343  if (!s->prev)
344  return 0;
345 
346  ret = filter(ctx, 0);
347  if (ret < 0)
348  return ret;
349 
350  return filter(ctx, 1);
351 }
352 
353 static int request_frame(AVFilterLink *outlink)
354 {
355  AVFilterContext *ctx = outlink->src;
356  W3FDIFContext *s = ctx->priv;
357 
358  do {
359  int ret;
360 
361  if (s->eof)
362  return AVERROR_EOF;
363 
364  ret = ff_request_frame(ctx->inputs[0]);
365 
366  if (ret == AVERROR_EOF && s->cur) {
367  AVFrame *next = av_frame_clone(s->next);
368  if (!next)
369  return AVERROR(ENOMEM);
370  next->pts = s->next->pts * 2 - s->cur->pts;
371  filter_frame(ctx->inputs[0], next);
372  s->eof = 1;
373  } else if (ret < 0) {
374  return ret;
375  }
376  } while (!s->cur);
377 
378  return 0;
379 }
380 
381 static av_cold void uninit(AVFilterContext *ctx)
382 {
383  W3FDIFContext *s = ctx->priv;
384  int i;
385 
386  av_frame_free(&s->prev);
387  av_frame_free(&s->cur );
388  av_frame_free(&s->next);
389 
390  for (i = 0; i < s->nb_threads; i++)
391  av_freep(&s->work_line[i]);
392 
393  av_freep(&s->work_line);
394 }
395 
/* single video input: frames arrive via filter_frame, geometry via config_input */
static const AVFilterPad w3fdif_inputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .filter_frame  = filter_frame,
        .config_props  = config_input,
    },
    { NULL }
};
405 
/* single video output: double-rate timing via config_output, pull via request_frame */
static const AVFilterPad w3fdif_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,
        .request_frame = request_frame,
    },
    { NULL }
};
415 
417  .name = "w3fdif",
418  .description = NULL_IF_CONFIG_SMALL("Apply Martin Weston three field deinterlace."),
419  .priv_size = sizeof(W3FDIFContext),
420  .priv_class = &w3fdif_class,
421  .uninit = uninit,
423  .inputs = w3fdif_inputs,
424  .outputs = w3fdif_outputs,
426 };
int plane
Definition: avisynth_c.h:291
#define NULL
Definition: coverity.c:32
const char * s
Definition: avisynth_c.h:631
AVFrame * out
static int filter(AVFilterContext *ctx, int is_second)
Definition: vf_w3fdif.c:275
AVFrame * prev
Definition: vf_w3fdif.c:42
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2129
This structure describes decoded (raw) audio or video data.
Definition: frame.h:171
AVOption.
Definition: opt.h:255
AVFrame * cur
Definition: vf_w3fdif.c:42
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:68
misc image utilities
static const AVFilterPad outputs[]
Definition: af_ashowinfo.c:248
int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2169
Main libavfilter public API header.
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:188
int num
numerator
Definition: rational.h:44
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:109
int is_disabled
the enabled state from the last expression evaluation
Definition: avfilter.h:686
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:283
struct AVFilterGraph * graph
filtergraph this filter belongs to
Definition: avfilter.h:656
const char * name
Pad name.
Definition: internal.h:69
static const AVOption w3fdif_options[]
Definition: vf_w3fdif.c:51
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:641
static int config_input(AVFilterLink *inlink)
Definition: vf_w3fdif.c:84
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1158
#define FLAGS
Definition: vf_w3fdif.c:48
planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
Definition: pixfmt.h:103
uint8_t
#define av_cold
Definition: attributes.h:74
AVOptions.
static const int32_t coef_lf[2][4]
Definition: vf_w3fdif.c:134
AVFILTER_DEFINE_CLASS(w3fdif)
static av_cold int end(AVCodecContext *avctx)
Definition: avrndec.c:90
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:257
static AVFrame * frame
int plane
Definition: vf_blend.c:87
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range...
Definition: pixfmt.h:102
int nb_threads
Maximum number of threads used by filters in this graph.
Definition: avfilter.h:1207
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:76
#define AVERROR_EOF
End of file.
Definition: error.h:55
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:367
A filter pad used for either input or output.
Definition: internal.h:63
int linesize[4]
bytes of pixel data per line for each plane
Definition: vf_w3fdif.c:37
planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
Definition: pixfmt.h:281
AVFrame * next
previous, current, next frames
Definition: vf_w3fdif.c:42
int ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:542
#define td
Definition: regdef.h:70
static int query_formats(AVFilterContext *ctx)
Definition: vf_w3fdif.c:63
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:89
#define AVERROR(e)
Definition: error.h:43
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:148
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:175
void * priv
private data for use by the filter
Definition: avfilter.h:654
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:442
#define OFFSET(x)
Definition: vf_w3fdif.c:47
const char * arg
Definition: jacosubdec.c:66
int planeheight[4]
height of each plane
Definition: vf_w3fdif.c:38
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:67
#define FFMIN(a, b)
Definition: common.h:81
int field
which field are we on, 0 or 1
Definition: vf_w3fdif.c:39
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:75
#define FF_CEIL_RSHIFT(a, b)
Definition: common.h:57
int32_t
static const int8_t n_coef_hf[2]
Definition: vf_w3fdif.c:136
Frame requests may need to loop in order to be fulfilled.
Definition: internal.h:374
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:451
AVFilter ff_vf_w3fdif
Definition: vf_w3fdif.c:416
static int config_output(AVFilterLink *outlink)
Definition: vf_w3fdif.c:112
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:199
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
Definition: pixfmt.h:280
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
AVFrame * adj
Definition: vf_w3fdif.c:141
static const int8_t n_coef_lf[2]
Definition: vf_w3fdif.c:133
AVFrame * cur
Definition: vf_w3fdif.c:141
BYTE int const BYTE int int int height
Definition: avisynth_c.h:676
planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
Definition: pixfmt.h:69
Describe the class of an AVClass context structure.
Definition: log.h:67
Filter definition.
Definition: avfilter.h:470
int av_image_fill_linesizes(int linesizes[4], enum AVPixelFormat pix_fmt, int width)
Fill plane linesizes for an image with pixel format pix_fmt and width width.
Definition: imgutils.c:88
static const AVFilterPad inputs[]
Definition: af_ashowinfo.c:239
const char * name
Filter name.
Definition: avfilter.h:474
#define AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL
Same as AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC, except that the filter will have its filter_frame() c...
Definition: avfilter.h:459
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:648
static const int32_t coef_hf[2][5]
Definition: vf_w3fdif.c:137
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:209
void * av_calloc(size_t nmemb, size_t size)
Allocate a block of nmemb * size bytes with alignment suitable for all memory accesses (including vec...
Definition: mem.c:260
AVFilterInternal * internal
An opaque struct for libavfilter internal use.
Definition: avfilter.h:679
static int flags
Definition: cpu.c:47
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:182
#define CONST(name, help, val, unit)
Definition: vf_w3fdif.c:49
static const AVFilterPad w3fdif_inputs[]
Definition: vf_w3fdif.c:396
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:63
Y , 8bpp.
Definition: pixfmt.h:71
common internal and external API header
static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
Definition: vf_w3fdif.c:315
planar GBRA 4:4:4:4 32bpp
Definition: pixfmt.h:299
int deint
which frames to deinterlace
Definition: vf_w3fdif.c:36
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:77
static int deinterlace_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_w3fdif.c:145
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:70
int den
denominator
Definition: rational.h:45
avfilter_execute_func * execute
Definition: internal.h:164
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:372
static const AVFilterPad w3fdif_outputs[]
Definition: vf_w3fdif.c:406
A list of supported formats for one end of a filter link.
Definition: formats.h:64
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:302
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(constuint8_t *) pi-0x80)*(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(constuint8_t *) pi-0x80)*(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(constint16_t *) pi >>8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t,*(constint16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t,*(constint16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(constint32_t *) pi >>24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t,*(constint32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t,*(constint32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(constfloat *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(constfloat *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(constfloat *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(constdouble *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(constdouble *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(constdouble *) pi *(1U<< 31))))#defineSET_CONV_FUNC_GROUP(ofmt, ifmt) staticvoidset_generic_function(AudioConvert *ac){}voidff_audio_convert_free(AudioConvert **ac){if(!*ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);}AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enumAVSampleFormatout_fmt, enumAVSampleFormatin_fmt, intchannels, intsample_rate, 
intapply_map){AudioConvert *ac;intin_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) returnNULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method!=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt)>2){ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc){av_free(ac);returnNULL;}returnac;}in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar){ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar?ac->channels:1;}elseif(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;elseac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);returnac;}intff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in){intuse_generic=1;intlen=in->nb_samples;intp;if(ac->dc){av_log(ac->avr, AV_LOG_TRACE,"%dsamples-audio_convert:%sto%s(dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));returnff_convert_dither(ac-> out
An instance of a filter.
Definition: avfilter.h:633
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_w3fdif.c:381
#define av_freep(p)
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:101
void INT64 start
Definition: avisynth_c.h:553
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:343
int32_t ** work_line
lines we are calculating
Definition: vf_w3fdif.c:43
static int request_frame(AVFilterLink *outlink)
Definition: vf_w3fdif.c:353
internal API functions
int nb_threads
Definition: vf_w3fdif.c:44
AVPixelFormat
Pixel format.
Definition: pixfmt.h:61
int filter
0 is simple, 1 is more complex
Definition: vf_w3fdif.c:35
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:553
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:240