FFmpeg
vf_colorize.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "libavutil/opt.h"
20 #include "libavutil/imgutils.h"
21 #include "avfilter.h"
22 #include "formats.h"
23 #include "internal.h"
24 #include "video.h"
25 
26 typedef struct ColorizeContext {
27  const AVClass *class;
28 
29  float hue;
30  float saturation;
31  float lightness;
32  float mix;
33 
34  int depth;
35  int c[3];
36  int planewidth[4];
37  int planeheight[4];
38 
39  int (*do_plane_slice[2])(AVFilterContext *s, void *arg,
40  int jobnr, int nb_jobs);
41 } ColorizeContext;
42 
43 static inline float lerpf(float v0, float v1, float f)
44 {
45  return v0 + (v1 - v0) * f;
46 }
47 
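/* A quick note on how the mix option behaves in the luma slices below:
 * yptr[x] = lerpf(yv, yptr[x], mix) keeps the source luma untouched when mix
 * is 1 and replaces it with the constant yv when mix is 0. For example, with
 * an 8-bit source sample of 200, a target luma yv of 128 and mix = 0.25:
 *   lerpf(128, 200, 0.25) = 128 + (200 - 128) * 0.25 = 146
 * i.e. only a quarter of the original lightness is kept. */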
48 static int colorizey_slice8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
49 {
50  ColorizeContext *s = ctx->priv;
51  AVFrame *frame = arg;
52  const int width = s->planewidth[0];
53  const int height = s->planeheight[0];
54  const int slice_start = (height * jobnr) / nb_jobs;
55  const int slice_end = (height * (jobnr + 1)) / nb_jobs;
56  const int ylinesize = frame->linesize[0];
57  uint8_t *yptr = frame->data[0] + slice_start * ylinesize;
58  const int yv = s->c[0];
59  const float mix = s->mix;
60 
61  for (int y = slice_start; y < slice_end; y++) {
62  for (int x = 0; x < width; x++)
63  yptr[x] = lerpf(yv, yptr[x], mix);
64 
65  yptr += ylinesize;
66  }
67 
68  return 0;
69 }
70 
71 static int colorizey_slice16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
72 {
73  ColorizeContext *s = ctx->priv;
74  AVFrame *frame = arg;
75  const int width = s->planewidth[0];
76  const int height = s->planeheight[0];
77  const int slice_start = (height * jobnr) / nb_jobs;
78  const int slice_end = (height * (jobnr + 1)) / nb_jobs;
79  const int ylinesize = frame->linesize[0] / 2;
80  uint16_t *yptr = (uint16_t *)frame->data[0] + slice_start * ylinesize;
81  const int yv = s->c[0];
82  const float mix = s->mix;
83 
84  for (int y = slice_start; y < slice_end; y++) {
85  for (int x = 0; x < width; x++)
86  yptr[x] = lerpf(yv, yptr[x], mix);
87 
88  yptr += ylinesize;
89  }
90 
91  return 0;
92 }
93 
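/* The *_slice16 variants divide frame->linesize[] by 2 because linesize is
 * counted in bytes while the pointers step in uint16_t samples; otherwise
 * they mirror the 8-bit code. For depths above 8 the constant s->c[] targets
 * are already scaled to the wider range by rgb2yuv() through its
 * ((1 << depth) - 1) factor. */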
94 static int colorize_slice8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
95 {
96  ColorizeContext *s = ctx->priv;
97  AVFrame *frame = arg;
98  const int width = s->planewidth[1];
99  const int height = s->planeheight[1];
100  const int slice_start = (height * jobnr) / nb_jobs;
101  const int slice_end = (height * (jobnr + 1)) / nb_jobs;
102  const int ulinesize = frame->linesize[1];
103  const int vlinesize = frame->linesize[2];
104  uint8_t *uptr = frame->data[1] + slice_start * ulinesize;
105  uint8_t *vptr = frame->data[2] + slice_start * vlinesize;
106  const int u = s->c[1];
107  const int v = s->c[2];
108 
109  for (int y = slice_start; y < slice_end; y++) {
110  for (int x = 0; x < width; x++) {
111  uptr[x] = u;
112  vptr[x] = v;
113  }
114 
115  uptr += ulinesize;
116  vptr += vlinesize;
117  }
118 
119  return 0;
120 }
121 
122 static int colorize_slice16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
123 {
124  ColorizeContext *s = ctx->priv;
125  AVFrame *frame = arg;
126  const int width = s->planewidth[1];
127  const int height = s->planeheight[1];
128  const int slice_start = (height * jobnr) / nb_jobs;
129  const int slice_end = (height * (jobnr + 1)) / nb_jobs;
130  const int ulinesize = frame->linesize[1] / 2;
131  const int vlinesize = frame->linesize[2] / 2;
132  uint16_t *uptr = (uint16_t *)frame->data[1] + slice_start * ulinesize;
133  uint16_t *vptr = (uint16_t *)frame->data[2] + slice_start * vlinesize;
134  const int u = s->c[1];
135  const int v = s->c[2];
136 
137  for (int y = slice_start; y < slice_end; y++) {
138  for (int x = 0; x < width; x++) {
139  uptr[x] = u;
140  vptr[x] = v;
141  }
142 
143  uptr += ulinesize;
144  vptr += vlinesize;
145  }
146 
147  return 0;
148 }
149 
150 static int do_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
151 {
152  ColorizeContext *s = ctx->priv;
153 
154  s->do_plane_slice[0](ctx, arg, jobnr, nb_jobs);
155  s->do_plane_slice[1](ctx, arg, jobnr, nb_jobs);
156 
157  return 0;
158 }
159 
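/* do_slice() is the callback handed to the slice-threading helper: each job
 * receives a jobnr in [0, nb_jobs) and the per-plane functions above map that
 * to a slice_start/slice_end row range, e.g. with nb_jobs = 4 on a 1080-row
 * plane, job 1 covers rows 270..539. do_plane_slice[0] blends the luma plane
 * with mix, do_plane_slice[1] overwrites both chroma planes. */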
160 static float hue2rgb(float p, float q, float t)
161 {
162  if (t < 0.f) t += 1.f;
163  if (t > 1.f) t -= 1.f;
164  if (t < 1.f/6.f) return p + (q - p) * 6.f * t;
165  if (t < 1.f/2.f) return q;
166  if (t < 2.f/3.f) return p + (q - p) * (2.f/3.f - t) * 6.f;
167 
168  return p;
169 }
170 
171 static void hsl2rgb(float h, float s, float l, float *r, float *g, float *b)
172 {
173  h /= 360.f;
174 
175  if (s == 0.f) {
176  *r = *g = *b = l;
177  } else {
178  const float q = l < 0.5f ? l * (1.f + s) : l + s - l * s;
179  const float p = 2.f * l - q;
180 
181  *r = hue2rgb(p, q, h + 1.f / 3.f);
182  *g = hue2rgb(p, q, h);
183  *b = hue2rgb(p, q, h - 1.f / 3.f);
184  }
185 }
186 
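/* Worked example for the HSL-to-RGB conversion above: with hue = 120,
 * saturation = 1 and lightness = 0.5, h becomes 1/3, q = l + s - l*s = 1 and
 * p = 2*l - q = 0, so hue2rgb() returns r = 0, g = 1, b = 0 (pure green).
 * With saturation = 0 the hue is ignored and r = g = b = lightness (grey). */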
187 static void rgb2yuv(float r, float g, float b, int *y, int *u, int *v, int depth)
188 {
189  *y = ((0.21260*219.0/255.0) * r + (0.71520*219.0/255.0) * g +
190  (0.07220*219.0/255.0) * b) * ((1 << depth) - 1);
191  *u = (-(0.11457*224.0/255.0) * r - (0.38543*224.0/255.0) * g +
192  (0.50000*224.0/255.0) * b + 0.5) * ((1 << depth) - 1);
193  *v = ((0.50000*224.0/255.0) * r - (0.45415*224.0/255.0) * g -
194  (0.04585*224.0/255.0) * b + 0.5) * ((1 << depth) - 1);
195 }
196 
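/* rgb2yuv() above applies the BT.709 luma coefficients (0.2126/0.7152/0.0722)
 * scaled by 219/255 for Y and 224/255 for chroma (limited-range excursions,
 * without adding a +16 black level to Y) and then multiplies by
 * ((1 << depth) - 1). Roughly, pure green (r, g, b) = (0, 1, 0) comes out as
 * (Y, U, V) = (156, 41, 25) at depth 8. */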
197 static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
198 {
199  AVFilterContext *ctx = inlink->dst;
200  ColorizeContext *s = ctx->priv;
201  float c[3];
202 
203  hsl2rgb(s->hue, s->saturation, s->lightness, &c[0], &c[1], &c[2]);
204  rgb2yuv(c[0], c[1], c[2], &s->c[0], &s->c[1], &s->c[2], s->depth);
205 
206  ctx->internal->execute(ctx, do_slice, frame, NULL,
207  FFMIN(s->planeheight[1], ff_filter_get_nb_threads(ctx)));
208 
209  return ff_filter_frame(ctx->outputs[0], frame);
210 }
211 
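/* filter_frame() recomputes s->c[] from the HSL options on every frame, which
 * is what lets the runtime option changes take effect immediately, and then
 * fans do_slice() out over the frame in place, using at most one job per
 * chroma row and no more than ff_filter_get_nb_threads(ctx) jobs. */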
212 static av_cold int query_formats(AVFilterContext *ctx)
213 {
214  static const enum AVPixelFormat pixel_fmts[] = {
215  AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
216  AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
217  AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
218  AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P,
219  AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_YUVJ444P,
220  AV_PIX_FMT_YUVJ411P,
221  AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
222  AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10,
223  AV_PIX_FMT_YUV440P10, AV_PIX_FMT_YUV444P10,
224  AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12,
225  AV_PIX_FMT_YUV440P12, AV_PIX_FMT_YUV444P12,
226  AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14,
227  AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
228  AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
229  AV_PIX_FMT_YUVA444P9, AV_PIX_FMT_YUVA444P10, AV_PIX_FMT_YUVA444P12, AV_PIX_FMT_YUVA444P16,
230  AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA422P12, AV_PIX_FMT_YUVA422P16,
231  AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA420P16,
232  AV_PIX_FMT_NONE
233  };
234 
235  AVFilterFormats *formats = NULL;
236 
237  formats = ff_make_format_list(pixel_fmts);
238  if (!formats)
239  return AVERROR(ENOMEM);
240 
241  return ff_set_common_formats(ctx, formats);
242 }
243 
244 static av_cold int config_input(AVFilterLink *inlink)
245 {
246  AVFilterContext *ctx = inlink->dst;
247  ColorizeContext *s = ctx->priv;
248  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
249  int depth;
250 
251  s->depth = depth = desc->comp[0].depth;
252 
253  s->planewidth[1] = s->planewidth[2] = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w);
254  s->planewidth[0] = s->planewidth[3] = inlink->w;
255  s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
256  s->planeheight[0] = s->planeheight[3] = inlink->h;
257 
258  s->do_plane_slice[0] = depth <= 8 ? colorizey_slice8 : colorizey_slice16;
259  s->do_plane_slice[1] = depth <= 8 ? colorize_slice8 : colorize_slice16;
260 
261  return 0;
262 }
263 
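/* config_input() caches the per-plane geometry: for a 1920x1080 yuv420p input
 * log2_chroma_w = log2_chroma_h = 1, so AV_CEIL_RSHIFT() yields 960x540 chroma
 * planes, and the slice handlers are selected once per depth (<= 8 bit vs.
 * 9-16 bit) rather than branching per pixel. */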
264 static const AVFilterPad colorize_inputs[] = {
265  {
266  .name = "default",
267  .type = AVMEDIA_TYPE_VIDEO,
268  .needs_writable = 1,
269  .filter_frame = filter_frame,
270  .config_props = config_input,
271  },
272  { NULL }
273 };
274 
275 static const AVFilterPad colorize_outputs[] = {
276  {
277  .name = "default",
278  .type = AVMEDIA_TYPE_VIDEO,
279  },
280  { NULL }
281 };
282 
283 #define OFFSET(x) offsetof(ColorizeContext, x)
284 #define VF AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
285 
286 static const AVOption colorize_options[] = {
287  { "hue", "set the hue", OFFSET(hue), AV_OPT_TYPE_FLOAT, {.dbl=0}, 0, 360, VF },
288  { "saturation", "set the saturation", OFFSET(saturation), AV_OPT_TYPE_FLOAT, {.dbl=0.5},0, 1, VF },
289  { "lightness", "set the lightness", OFFSET(lightness), AV_OPT_TYPE_FLOAT, {.dbl=0.5},0, 1, VF },
290  { "mix", "set the mix of source lightness", OFFSET(mix), AV_OPT_TYPE_FLOAT, {.dbl=1}, 0, 1, VF },
291  { NULL }
292 };
293 
294 AVFILTER_DEFINE_CLASS(colorize);
295 
296 AVFilter ff_vf_colorize = {
297  .name = "colorize",
298  .description = NULL_IF_CONFIG_SMALL("Overlay a solid color on the video stream."),
299  .priv_size = sizeof(ColorizeContext),
300  .priv_class = &colorize_class,
301  .query_formats = query_formats,
302  .inputs = colorize_inputs,
303  .outputs = colorize_outputs,
304  .flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
305  .process_command = ff_filter_process_command,
306 };
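/* A rough usage sketch (assuming a build with this filter enabled): the four
 * AVOptions above belong to the "colorize" filter, so for example
 *   ffmpeg -i input.mp4 -vf "colorize=hue=120:saturation=0.7:lightness=0.5:mix=0.4" output.mp4
 * tints the stream towards green while keeping 40% of the source luma, and
 * because the options are flagged AV_OPT_FLAG_RUNTIME_PARAM and the filter
 * installs ff_filter_process_command(), they can also be changed mid-stream,
 * e.g. through the sendcmd filter or avfilter_process_command(). */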