FFmpeg
colorchannelmixer_template.c
/*
 * Copyright (c) 2013 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <float.h>

#undef pixel
#undef cpixel
#undef ROUND
#if DEPTH == 8
#define pixel uint8_t
#define cpixel int
#define ROUND lrintf
#elif DEPTH == 16
#define pixel uint16_t
#define cpixel int
#define ROUND lrintf
#else
#define NOP(x) (x)
#define pixel float
#define cpixel float
#define ROUND NOP
#endif
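
/*
 * Descriptive note: DEPTH selects the sample layout this template is compiled
 * for.  The 8-bit and 16-bit variants store pixels as uint8_t/uint16_t,
 * accumulate the mix in a plain int and round with lrintf(); for any other
 * DEPTH (the 32-bit float path) samples and accumulators stay float and
 * ROUND becomes a no-op.
 */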

#undef fn
#undef fn2
#undef fn3
#define fn3(a,b) a##_##b
#define fn2(a,b) fn3(a,b)
#define fn(a) fn2(a, DEPTH)

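/*
 * fn() pastes DEPTH onto a name, so with DEPTH defined as 8 the function
 * below is emitted as filter_slice_rgba_planar_8, with 16 as
 * filter_slice_rgba_planar_16, and so on.  A minimal sketch of how the
 * including filter is assumed to instantiate the template (the exact
 * sequence in vf_colorchannelmixer.c may differ):
 *
 *     #define DEPTH 8
 *     #include "colorchannelmixer_template.c"
 *
 *     #undef DEPTH
 *     #define DEPTH 16
 *     #include "colorchannelmixer_template.c"
 *
 *     #undef DEPTH
 *     #define DEPTH 32
 *     #include "colorchannelmixer_template.c"
 */
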
static av_always_inline int fn(filter_slice_rgba_planar)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs,
                                                          int have_alpha, int depth, int pc)
{
    ColorChannelMixerContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    const float pa = s->preserve_amount;
    const float max = (1 << depth) - 1;
    const int slice_start = (out->height * jobnr) / nb_jobs;
    const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
    const pixel *srcg = (const pixel *)(in->data[0] + slice_start * in->linesize[0]);
    const pixel *srcb = (const pixel *)(in->data[1] + slice_start * in->linesize[1]);
    const pixel *srcr = (const pixel *)(in->data[2] + slice_start * in->linesize[2]);
    const pixel *srca = (const pixel *)(in->data[3] + slice_start * in->linesize[3]);
    pixel *dstg = (pixel *)(out->data[0] + slice_start * out->linesize[0]);
    pixel *dstb = (pixel *)(out->data[1] + slice_start * out->linesize[1]);
    pixel *dstr = (pixel *)(out->data[2] + slice_start * out->linesize[2]);
    pixel *dsta = (pixel *)(out->data[3] + slice_start * out->linesize[3]);

    for (int i = slice_start; i < slice_end; i++) {
        for (int j = 0; j < out->width; j++) {
            const pixel rin = srcr[j];
            const pixel gin = srcg[j];
            const pixel bin = srcb[j];
            const pixel ain = have_alpha ? srca[j] : 0;
            cpixel rout, gout, bout;

#if DEPTH == 32
            rout = s->rr * rin +
                   s->rg * gin +
                   s->rb * bin +
                   (have_alpha == 1 ? s->ra * ain : 0);
            gout = s->gr * rin +
                   s->gg * gin +
                   s->gb * bin +
                   (have_alpha == 1 ? s->ga * ain : 0);
            bout = s->br * rin +
                   s->bg * gin +
                   s->bb * bin +
                   (have_alpha == 1 ? s->ba * ain : 0);
#else
            rout = s->lut[R][R][rin] +
                   s->lut[R][G][gin] +
                   s->lut[R][B][bin] +
                   (have_alpha == 1 ? s->lut[R][A][ain] : 0);
            gout = s->lut[G][R][rin] +
                   s->lut[G][G][gin] +
                   s->lut[G][B][bin] +
                   (have_alpha == 1 ? s->lut[G][A][ain] : 0);
            bout = s->lut[B][R][rin] +
                   s->lut[B][G][gin] +
                   s->lut[B][B][bin] +
                   (have_alpha == 1 ? s->lut[B][A][ain] : 0);
#endif

            if (pc) {
                float frout, fgout, fbout, lin, lout;

#if DEPTH < 32
                frout = av_clipf(rout, 0.f, max);
                fgout = av_clipf(gout, 0.f, max);
                fbout = av_clipf(bout, 0.f, max);
#else
                frout = rout;
                fgout = gout;
                fbout = bout;
#endif

                preserve_color(s->preserve_color, rin, gin, bin,
                               rout, gout, bout, max, &lin, &lout);
                preservel(&frout, &fgout, &fbout, lin, lout, max);

                rout = ROUND(lerpf(rout, frout, pa));
                gout = ROUND(lerpf(gout, fgout, pa));
                bout = ROUND(lerpf(bout, fbout, pa));
            }

#if DEPTH < 32
            dstr[j] = av_clip_uintp2(rout, depth);
            dstg[j] = av_clip_uintp2(gout, depth);
            dstb[j] = av_clip_uintp2(bout, depth);
#else
            dstr[j] = rout;
            dstg[j] = gout;
            dstb[j] = bout;
#endif

            if (have_alpha == 1) {
#if DEPTH < 32
                dsta[j] = av_clip_uintp2(s->lut[A][R][rin] +
                                         s->lut[A][G][gin] +
                                         s->lut[A][B][bin] +
                                         s->lut[A][A][ain], depth);
#else
                dsta[j] = s->ar * rin +
                          s->ag * gin +
                          s->ab * bin +
                          s->aa * ain;
#endif
            }
        }

        srcg += in->linesize[0] / sizeof(pixel);
        srcb += in->linesize[1] / sizeof(pixel);
        srcr += in->linesize[2] / sizeof(pixel);
        srca += in->linesize[3] / sizeof(pixel);
        dstg += out->linesize[0] / sizeof(pixel);
        dstb += out->linesize[1] / sizeof(pixel);
        dstr += out->linesize[2] / sizeof(pixel);
        dsta += out->linesize[3] / sizeof(pixel);
    }

    return 0;
}
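
/*
 * A sketch (not part of this template) of how the per-depth instantiations
 * are assumed to be driven from the owning filter: a thin wrapper pins the
 * trailing parameters (have_alpha, depth, pc) and is handed to the slice
 * threading helper.  The wrapper name and the exact call in
 * vf_colorchannelmixer.c are illustrative, not verbatim.
 *
 *     static int filter_slice_gbrap8(AVFilterContext *ctx, void *arg,
 *                                    int jobnr, int nb_jobs)
 *     {
 *         return filter_slice_rgba_planar_8(ctx, arg, jobnr, nb_jobs, 1, 8, 0);
 *     }
 *
 *     // in filter_frame(), roughly:
 *     ThreadData td = { .in = in, .out = out };
 *     ff_filter_execute(ctx, filter_slice_gbrap8, &td, NULL,
 *                       FFMIN(out->height, ff_filter_get_nb_threads(ctx)));
 */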

#if DEPTH < 32

static av_always_inline int fn(filter_slice_rgba_packed)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs,
                                                          int have_alpha, int step, int pc, int depth)
{
    ColorChannelMixerContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    const float pa = s->preserve_amount;
    const float max = (1 << depth) - 1;
    const int slice_start = (out->height * jobnr) / nb_jobs;
    const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
    const uint8_t roffset = s->rgba_map[R];
    const uint8_t goffset = s->rgba_map[G];
    const uint8_t boffset = s->rgba_map[B];
    const uint8_t aoffset = s->rgba_map[A];
    const uint8_t *srcrow = in->data[0] + slice_start * in->linesize[0];
    uint8_t *dstrow = out->data[0] + slice_start * out->linesize[0];
    int i, j;

    for (i = slice_start; i < slice_end; i++) {
        const pixel *src = (const pixel *)srcrow;
        pixel *dst = (pixel *)dstrow;

        for (j = 0; j < out->width * step; j += step) {
            const pixel rin = src[j + roffset];
            const pixel gin = src[j + goffset];
            const pixel bin = src[j + boffset];
            const pixel ain = src[j + aoffset];
            int rout, gout, bout;

            rout = s->lut[R][R][rin] +
                   s->lut[R][G][gin] +
                   s->lut[R][B][bin] +
                   (have_alpha == 1 ? s->lut[R][A][ain] : 0);
            gout = s->lut[G][R][rin] +
                   s->lut[G][G][gin] +
                   s->lut[G][B][bin] +
                   (have_alpha == 1 ? s->lut[G][A][ain] : 0);
            bout = s->lut[B][R][rin] +
                   s->lut[B][G][gin] +
                   s->lut[B][B][bin] +
                   (have_alpha == 1 ? s->lut[B][A][ain] : 0);

            if (pc) {
                float frout = av_clipf(rout, 0.f, max);
                float fgout = av_clipf(gout, 0.f, max);
                float fbout = av_clipf(bout, 0.f, max);
                float lin, lout;

                preserve_color(s->preserve_color, rin, gin, bin,
                               rout, gout, bout, max, &lin, &lout);
                preservel(&frout, &fgout, &fbout, lin, lout, max);

                rout = lrintf(lerpf(rout, frout, pa));
                gout = lrintf(lerpf(gout, fgout, pa));
                bout = lrintf(lerpf(bout, fbout, pa));
            }

            dst[j + roffset] = av_clip_uintp2(rout, depth);
            dst[j + goffset] = av_clip_uintp2(gout, depth);
            dst[j + boffset] = av_clip_uintp2(bout, depth);

            if (have_alpha == 1) {
                dst[j + aoffset] = av_clip_uintp2(s->lut[A][R][rin] +
                                                  s->lut[A][G][gin] +
                                                  s->lut[A][B][bin] +
                                                  s->lut[A][A][ain], depth);
            }
        }

        srcrow += in->linesize[0];
        dstrow += out->linesize[0];
    }

    return 0;
}

#endif
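
/*
 * For the integer depths, s->lut[out][in][v] is used above as a precomputed
 * product: each entry is assumed to hold the rounded value of v multiplied by
 * the corresponding mixing coefficient, so one output channel costs four
 * table lookups and three additions per pixel.  A minimal sketch of how such
 * tables could be filled (illustrative only; the real initialisation lives in
 * vf_colorchannelmixer.c and may differ):
 *
 *     const int size = 1 << depth;
 *     for (int v = 0; v < size; v++) {
 *         s->lut[R][R][v] = lrintf(v * s->rr);
 *         s->lut[R][G][v] = lrintf(v * s->rg);
 *         s->lut[R][B][v] = lrintf(v * s->rb);
 *         s->lut[R][A][v] = lrintf(v * s->ra);
 *         // ... and likewise for the G, B and A output rows
 *     }
 */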