FFmpeg
vf_paletteuse.c
1 /*
2  * Copyright (c) 2015 Stupeflix
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * Use a palette to downsample an input video stream.
24  */
25 
26 #include "libavutil/bprint.h"
27 #include "libavutil/internal.h"
28 #include "libavutil/opt.h"
29 #include "libavutil/qsort.h"
30 #include "avfilter.h"
31 #include "filters.h"
32 #include "framesync.h"
33 #include "internal.h"
34 
35 enum dithering_mode {
36  DITHERING_NONE,
37  DITHERING_BAYER,
38  DITHERING_HECKBERT,
39  DITHERING_FLOYD_STEINBERG,
40  DITHERING_SIERRA2,
41  DITHERING_SIERRA2_4A,
42  NB_DITHERING
43 };
44 
45 enum color_search_method {
46  COLOR_SEARCH_NNS_ITERATIVE,
47  COLOR_SEARCH_NNS_RECURSIVE,
48  COLOR_SEARCH_BRUTEFORCE,
49  NB_COLOR_SEARCHES
50 };
51 
52 enum diff_mode {
53  DIFF_MODE_NONE,
54  DIFF_MODE_RECTANGLE,
55  NB_DIFF_MODE
56 };
57 
58 struct color_node {
59  uint8_t val[4];
60  uint8_t palette_id;
61  int split;
62  int left_id, right_id;
63 };
64 
65 #define NBITS 5
66 #define CACHE_SIZE (1<<(3*NBITS))
67 
68 struct cached_color {
69  uint32_t color;
70  uint8_t pal_entry;
71 };
72 
73 struct cache_node {
74  struct cached_color *entries;
75  int nb_entries;
76 };
77 
78 struct PaletteUseContext;
79 
80 typedef int (*set_frame_func)(struct PaletteUseContext *s, AVFrame *out, AVFrame *in,
81  int x_start, int y_start, int width, int height);
82 
83 typedef struct PaletteUseContext {
84  const AVClass *class;
85  FFFrameSync fs;
86  struct cache_node cache[CACHE_SIZE]; /* lookup cache */
87  struct color_node map[AVPALETTE_COUNT]; /* 3D-Tree (KD-Tree with K=3) for reverse colormap */
88  uint32_t palette[AVPALETTE_COUNT];
89  int transparency_index; /* index in the palette of transparency. -1 if there is no transparency in the palette. */
90  int trans_thresh;
91  int palette_loaded;
92  int dither;
93  int new;
94  set_frame_func set_frame;
95  int bayer_scale;
96  int ordered_dither[8*8];
97  int diff_mode;
98  AVFrame *last_in;
99  AVFrame *last_out;
100 
101  /* debug options */
102  char *dot_filename;
103  int color_search_method;
104  int calc_mean_err;
105  uint64_t total_mean_err;
106  int debug_accuracy;
107 } PaletteUseContext;
108 
109 #define OFFSET(x) offsetof(PaletteUseContext, x)
110 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
111 static const AVOption paletteuse_options[] = {
112  { "dither", "select dithering mode", OFFSET(dither), AV_OPT_TYPE_INT, {.i64=DITHERING_SIERRA2_4A}, 0, NB_DITHERING-1, FLAGS, "dithering_mode" },
113  { "bayer", "ordered 8x8 bayer dithering (deterministic)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_BAYER}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
114  { "heckbert", "dithering as defined by Paul Heckbert in 1982 (simple error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_HECKBERT}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
115  { "floyd_steinberg", "Floyd and Steingberg dithering (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_FLOYD_STEINBERG}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
116  { "sierra2", "Frankie Sierra dithering v2 (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_SIERRA2}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
117  { "sierra2_4a", "Frankie Sierra dithering v2 \"Lite\" (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_SIERRA2_4A}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
118  { "bayer_scale", "set scale for bayer dithering", OFFSET(bayer_scale), AV_OPT_TYPE_INT, {.i64=2}, 0, 5, FLAGS },
119  { "diff_mode", "set frame difference mode", OFFSET(diff_mode), AV_OPT_TYPE_INT, {.i64=DIFF_MODE_NONE}, 0, NB_DIFF_MODE-1, FLAGS, "diff_mode" },
120  { "rectangle", "process smallest different rectangle", 0, AV_OPT_TYPE_CONST, {.i64=DIFF_MODE_RECTANGLE}, INT_MIN, INT_MAX, FLAGS, "diff_mode" },
121  { "new", "take new palette for each output frame", OFFSET(new), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
122  { "alpha_threshold", "set the alpha threshold for transparency", OFFSET(trans_thresh), AV_OPT_TYPE_INT, {.i64=128}, 0, 255, FLAGS },
123 
124  /* following are the debug options, not part of the official API */
125  { "debug_kdtree", "save Graphviz graph of the kdtree in specified file", OFFSET(dot_filename), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
126  { "color_search", "set reverse colormap color search method", OFFSET(color_search_method), AV_OPT_TYPE_INT, {.i64=COLOR_SEARCH_NNS_ITERATIVE}, 0, NB_COLOR_SEARCHES-1, FLAGS, "search" },
127  { "nns_iterative", "iterative search", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_NNS_ITERATIVE}, INT_MIN, INT_MAX, FLAGS, "search" },
128  { "nns_recursive", "recursive search", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_NNS_RECURSIVE}, INT_MIN, INT_MAX, FLAGS, "search" },
129  { "bruteforce", "brute-force into the palette", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_BRUTEFORCE}, INT_MIN, INT_MAX, FLAGS, "search" },
130  { "mean_err", "compute and print mean error", OFFSET(calc_mean_err), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
131  { "debug_accuracy", "test color search accuracy", OFFSET(debug_accuracy), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
132  { NULL }
133 };
134 
135 AVFILTER_DEFINE_CLASS(paletteuse);
136 
137 static int load_apply_palette(FFFrameSync *fs);
138 
139 static int query_formats(AVFilterContext *ctx)
140 {
141  static const enum AVPixelFormat in_fmts[] = {AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE};
142  static const enum AVPixelFormat inpal_fmts[] = {AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE};
143  static const enum AVPixelFormat out_fmts[] = {AV_PIX_FMT_PAL8, AV_PIX_FMT_NONE};
144  int ret;
145  AVFilterFormats *in = ff_make_format_list(in_fmts);
146  AVFilterFormats *inpal = ff_make_format_list(inpal_fmts);
147  AVFilterFormats *out = ff_make_format_list(out_fmts);
148  if (!in || !inpal || !out) {
149  av_freep(&in);
150  av_freep(&inpal);
151  av_freep(&out);
152  return AVERROR(ENOMEM);
153  }
154  if ((ret = ff_formats_ref(in , &ctx->inputs[0]->out_formats)) < 0 ||
155  (ret = ff_formats_ref(inpal, &ctx->inputs[1]->out_formats)) < 0 ||
156  (ret = ff_formats_ref(out , &ctx->outputs[0]->in_formats)) < 0)
157  return ret;
158  return 0;
159 }
160 
161 static av_always_inline int dither_color(uint32_t px, int er, int eg, int eb, int scale, int shift)
162 {
163  return av_clip_uint8( px >> 24 ) << 24
164  | av_clip_uint8((px >> 16 & 0xff) + ((er * scale) / (1<<shift))) << 16
165  | av_clip_uint8((px >> 8 & 0xff) + ((eg * scale) / (1<<shift))) << 8
166  | av_clip_uint8((px & 0xff) + ((eb * scale) / (1<<shift)));
167 }
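
A minimal standalone sketch of the same error-propagation arithmetic (not part of the original file; clip_u8 and spread_error are illustrative names). Each neighbouring pixel receives err * scale / 2^shift per channel while alpha is left untouched, so the classic Floyd-Steinberg weight 7/16 becomes the pair scale=7, shift=4 used further down in set_frame():

#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>

static uint8_t clip_u8(int x) { return x < 0 ? 0 : x > 255 ? 255 : x; }

/* mirror of dither_color(): add err*scale / 2^shift to each RGB channel */
static uint32_t spread_error(uint32_t px, int er, int eg, int eb, int scale, int shift)
{
    return (uint32_t)clip_u8( px >> 24                                      ) << 24
         | (uint32_t)clip_u8((px >> 16 & 0xff) + (er * scale) / (1 << shift)) << 16
         | (uint32_t)clip_u8((px >>  8 & 0xff) + (eg * scale) / (1 << shift)) <<  8
         | (uint32_t)clip_u8((px       & 0xff) + (eb * scale) / (1 << shift));
}

int main(void)
{
    /* Floyd-Steinberg pushes 7/16 of the error to the right neighbour */
    uint32_t right = spread_error(0xff808080, 32, -16, 8, 7, 4);
    printf("0x%08"PRIX32"\n", right); /* alpha kept; RGB nudged by +14, -7, +3 */
    return 0;
}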
168 
169 static av_always_inline int diff(const uint8_t *c1, const uint8_t *c2, const int trans_thresh)
170 {
171  // XXX: try L*a*b with CIE76 (dL*dL + da*da + db*db)
172  const int dr = c1[1] - c2[1];
173  const int dg = c1[2] - c2[2];
174  const int db = c1[3] - c2[3];
175 
176  if (c1[0] < trans_thresh && c2[0] < trans_thresh) {
177  return 0;
178  } else if (c1[0] >= trans_thresh && c2[0] >= trans_thresh) {
179  return dr*dr + dg*dg + db*db;
180  } else {
181  return 255*255 + 255*255 + 255*255;
182  }
183 }
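
A standalone sketch of this distance metric (not part of the original file; sq_dist is an illustrative name). Two colors below the alpha threshold compare as identical, an opaque/transparent pair gets the worst possible score 3*255*255 = 195075 so it can never win, and two opaque colors get the squared Euclidean RGB distance:

#include <stdio.h>
#include <stdint.h>

static int sq_dist(const uint8_t *c1, const uint8_t *c2, int trans_thresh)
{
    const int dr = c1[1] - c2[1], dg = c1[2] - c2[2], db = c1[3] - c2[3];
    if (c1[0] < trans_thresh && c2[0] < trans_thresh)
        return 0;                      /* both transparent: identical */
    if (c1[0] >= trans_thresh && c2[0] >= trans_thresh)
        return dr*dr + dg*dg + db*db;  /* both opaque: squared RGB distance */
    return 3 * 255 * 255;              /* mixed: worst possible distance */
}

int main(void)
{
    const uint8_t red[]   = {0xff, 0xff, 0x00, 0x00};
    const uint8_t pink[]  = {0xff, 0xff, 0x80, 0x80};
    const uint8_t clear[] = {0x00, 0xff, 0x00, 0x00};
    printf("%d %d\n", sq_dist(red, pink, 128), sq_dist(red, clear, 128));
    /* prints: 32768 195075 */
    return 0;
}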
184 
185 static av_always_inline uint8_t colormap_nearest_bruteforce(const uint32_t *palette, const uint8_t *argb, const int trans_thresh)
186 {
187  int i, pal_id = -1, min_dist = INT_MAX;
188 
189  for (i = 0; i < AVPALETTE_COUNT; i++) {
190  const uint32_t c = palette[i];
191 
192  if (c >> 24 >= trans_thresh) { // ignore transparent entry
193  const uint8_t palargb[] = {
194  palette[i]>>24 & 0xff,
195  palette[i]>>16 & 0xff,
196  palette[i]>> 8 & 0xff,
197  palette[i] & 0xff,
198  };
199  const int d = diff(palargb, argb, trans_thresh);
200  if (d < min_dist) {
201  pal_id = i;
202  min_dist = d;
203  }
204  }
205  }
206  return pal_id;
207 }
208 
209 /* Recursive form, simpler but a bit slower. Kept for reference. */
210 struct nearest_color {
211  int node_pos;
212  int dist_sqd;
213 };
214 
215 static void colormap_nearest_node(const struct color_node *map,
216  const int node_pos,
217  const uint8_t *target,
218  const int trans_thresh,
219  struct nearest_color *nearest)
220 {
221  const struct color_node *kd = map + node_pos;
222  const int s = kd->split;
223  int dx, nearer_kd_id, further_kd_id;
224  const uint8_t *current = kd->val;
225  const int current_to_target = diff(target, current, trans_thresh);
226 
227  if (current_to_target < nearest->dist_sqd) {
228  nearest->node_pos = node_pos;
229  nearest->dist_sqd = current_to_target;
230  }
231 
232  if (kd->left_id != -1 || kd->right_id != -1) {
233  dx = target[s] - current[s];
234 
235  if (dx <= 0) nearer_kd_id = kd->left_id, further_kd_id = kd->right_id;
236  else nearer_kd_id = kd->right_id, further_kd_id = kd->left_id;
237 
238  if (nearer_kd_id != -1)
239  colormap_nearest_node(map, nearer_kd_id, target, trans_thresh, nearest);
240 
241  if (further_kd_id != -1 && dx*dx < nearest->dist_sqd)
242  colormap_nearest_node(map, further_kd_id, target, trans_thresh, nearest);
243  }
244 }
245 
246 static av_always_inline uint8_t colormap_nearest_recursive(const struct color_node *node, const uint8_t *rgb, const int trans_thresh)
247 {
248  struct nearest_color res = {.dist_sqd = INT_MAX, .node_pos = -1};
249  colormap_nearest_node(node, 0, rgb, trans_thresh, &res);
250  return node[res.node_pos].palette_id;
251 }
252 
253 struct stack_node {
254  int color_id;
255  int dx2;
256 };
257 
258 static av_always_inline uint8_t colormap_nearest_iterative(const struct color_node *root, const uint8_t *target, const int trans_thresh)
259 {
260  int pos = 0, best_node_id = -1, best_dist = INT_MAX, cur_color_id = 0;
261  struct stack_node nodes[16];
262  struct stack_node *node = &nodes[0];
263 
264  for (;;) {
265 
266  const struct color_node *kd = &root[cur_color_id];
267  const uint8_t *current = kd->val;
268  const int current_to_target = diff(target, current, trans_thresh);
269 
270  /* Compare current color node to the target and update our best node if
271  * it's actually better. */
272  if (current_to_target < best_dist) {
273  best_node_id = cur_color_id;
274  if (!current_to_target)
275  goto end; // exact match, we can return immediately
276  best_dist = current_to_target;
277  }
278 
279  /* Check if it's not a leaf */
280  if (kd->left_id != -1 || kd->right_id != -1) {
281  const int split = kd->split;
282  const int dx = target[split] - current[split];
283  int nearer_kd_id, further_kd_id;
284 
285  /* Define which side is the most interesting. */
286  if (dx <= 0) nearer_kd_id = kd->left_id, further_kd_id = kd->right_id;
287  else nearer_kd_id = kd->right_id, further_kd_id = kd->left_id;
288 
289  if (nearer_kd_id != -1) {
290  if (further_kd_id != -1) {
291  /* Here, both paths are defined, so we push a state for
292  * when we are going back. */
293  node->color_id = further_kd_id;
294  node->dx2 = dx*dx;
295  pos++;
296  node++;
297  }
298  /* We can now update current color with the most probable path
299  * (no need to create a state since there is nothing to save
300  * anymore). */
301  cur_color_id = nearer_kd_id;
302  continue;
303  } else if (dx*dx < best_dist) {
304  /* The nearest path isn't available, so there is only one path
305  * possible and it's the least probable. We enter it only if the
306  * distance from the current point to the hyper rectangle is
307  * less than our best distance. */
308  cur_color_id = further_kd_id;
309  continue;
310  }
311  }
312 
313  /* Unstack as much as we can, typically as long as the least probable
314  * branch aren't actually probable. */
315  do {
316  if (--pos < 0)
317  goto end;
318  node--;
319  } while (node->dx2 >= best_dist);
320 
321  /* We got a node where the least probable branch might actually contain
322  * a relevant color. */
323  cur_color_id = node->color_id;
324  }
325 
326 end:
327  return root[best_node_id].palette_id;
328 }
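
A toy illustration (not part of the original file) of the pruning rule shared by both tree searches: the far child lies entirely on the other side of the splitting plane, so it is only worth visiting when the squared distance to that plane, dx*dx, is still smaller than the best squared distance found so far. The 16-entry stack above stays well within bounds, since the tree holds at most AVPALETTE_COUNT (256) nodes built by median splits:

#include <stdio.h>

int main(void)
{
    int best_dist = 30 * 30;   /* best squared distance found so far */
    int dx        = 40;        /* signed distance to the split plane */
    printf("visit far subtree? %s\n", dx*dx < best_dist ? "yes" : "no"); /* no  */
    dx = 10;
    printf("visit far subtree? %s\n", dx*dx < best_dist ? "yes" : "no"); /* yes */
    return 0;
}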
329 
330 #define COLORMAP_NEAREST(search, palette, root, target, trans_thresh) \
331  search == COLOR_SEARCH_NNS_ITERATIVE ? colormap_nearest_iterative(root, target, trans_thresh) : \
332  search == COLOR_SEARCH_NNS_RECURSIVE ? colormap_nearest_recursive(root, target, trans_thresh) : \
333  colormap_nearest_bruteforce(palette, target, trans_thresh)
334 
335 /**
336  * Check if the requested color is in the cache already. If not, find it in the
337  * color tree and cache it.
338  * Note: a, r, g, and b are the components of color, but are passed as well to avoid
339  * recomputing them (they are generally computed by the caller for other uses).
340  */
341 static av_always_inline int color_get(PaletteUseContext *s, uint32_t color,
342  uint8_t a, uint8_t r, uint8_t g, uint8_t b,
343  const enum color_search_method search_method)
344 {
345  int i;
346  const uint8_t argb_elts[] = {a, r, g, b};
347  const uint8_t rhash = r & ((1<<NBITS)-1);
348  const uint8_t ghash = g & ((1<<NBITS)-1);
349  const uint8_t bhash = b & ((1<<NBITS)-1);
350  const unsigned hash = rhash<<(NBITS*2) | ghash<<NBITS | bhash;
351  struct cache_node *node = &s->cache[hash];
352  struct cached_color *e;
353 
354  // first, check for transparency
355  if (a < s->trans_thresh && s->transparency_index >= 0) {
356  return s->transparency_index;
357  }
358 
359  for (i = 0; i < node->nb_entries; i++) {
360  e = &node->entries[i];
361  if (e->color == color)
362  return e->pal_entry;
363  }
364 
365  e = av_dynarray2_add((void**)&node->entries, &node->nb_entries,
366  sizeof(*node->entries), NULL);
367  if (!e)
368  return AVERROR(ENOMEM);
369  e->color = color;
370  e->pal_entry = COLORMAP_NEAREST(search_method, s->palette, s->map, argb_elts, s->trans_thresh);
371 
372  return e->pal_entry;
373 }
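
A standalone sketch of the cache addressing used above (not part of the original file; cache_hash is an illustrative name). With NBITS = 5, the low five bits of R, G and B are packed into a 15-bit bucket index, giving CACHE_SIZE = 32768 buckets, each holding the exact 32-bit colors already resolved to a palette entry:

#include <stdio.h>
#include <stdint.h>

#define NBITS 5
#define CACHE_SIZE (1 << (3 * NBITS))   /* 32768 buckets */

static unsigned cache_hash(uint32_t argb)
{
    const uint8_t r = argb >> 16 & 0xff;
    const uint8_t g = argb >>  8 & 0xff;
    const uint8_t b = argb       & 0xff;
    const unsigned rh = r & ((1 << NBITS) - 1);  /* low 5 bits of each channel */
    const unsigned gh = g & ((1 << NBITS) - 1);
    const unsigned bh = b & ((1 << NBITS) - 1);
    return rh << (NBITS * 2) | gh << NBITS | bh; /* alpha is not hashed */
}

int main(void)
{
    printf("buckets: %d\n", CACHE_SIZE);
    printf("hash(0xff102030) = %u\n", cache_hash(0xff102030));
    /* 0x10,0x20,0x30 -> low 5 bits 16,0,16 -> 16<<10 | 0<<5 | 16 = 16400 */
    return 0;
}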
374 
375 static av_always_inline int get_dst_color_err(PaletteUseContext *s,
376  uint32_t c, int *er, int *eg, int *eb,
377  const enum color_search_method search_method)
378 {
379  const uint8_t a = c >> 24 & 0xff;
380  const uint8_t r = c >> 16 & 0xff;
381  const uint8_t g = c >> 8 & 0xff;
382  const uint8_t b = c & 0xff;
383  uint32_t dstc;
384  const int dstx = color_get(s, c, a, r, g, b, search_method);
385  if (dstx < 0)
386  return dstx;
387  dstc = s->palette[dstx];
388  *er = r - (dstc >> 16 & 0xff);
389  *eg = g - (dstc >> 8 & 0xff);
390  *eb = b - (dstc & 0xff);
391  return dstx;
392 }
393 
394 static av_always_inline int set_frame(PaletteUseContext *s, AVFrame *out, AVFrame *in,
395  int x_start, int y_start, int w, int h,
396  enum dithering_mode dither,
397  const enum color_search_method search_method)
398 {
399  int x, y;
400  const int src_linesize = in ->linesize[0] >> 2;
401  const int dst_linesize = out->linesize[0];
402  uint32_t *src = ((uint32_t *)in ->data[0]) + y_start*src_linesize;
403  uint8_t *dst = out->data[0] + y_start*dst_linesize;
404 
405  w += x_start;
406  h += y_start;
407 
408  for (y = y_start; y < h; y++) {
409  for (x = x_start; x < w; x++) {
410  int er, eg, eb;
411 
412  if (dither == DITHERING_BAYER) {
413  const int d = s->ordered_dither[(y & 7)<<3 | (x & 7)];
414  const uint8_t a8 = src[x] >> 24 & 0xff;
415  const uint8_t r8 = src[x] >> 16 & 0xff;
416  const uint8_t g8 = src[x] >> 8 & 0xff;
417  const uint8_t b8 = src[x] & 0xff;
418  const uint8_t r = av_clip_uint8(r8 + d);
419  const uint8_t g = av_clip_uint8(g8 + d);
420  const uint8_t b = av_clip_uint8(b8 + d);
421  const int color = color_get(s, src[x], a8, r, g, b, search_method);
422 
423  if (color < 0)
424  return color;
425  dst[x] = color;
426 
427  } else if (dither == DITHERING_HECKBERT) {
428  const int right = x < w - 1, down = y < h - 1;
429  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
430 
431  if (color < 0)
432  return color;
433  dst[x] = color;
434 
435  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 3, 3);
436  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 3, 3);
437  if (right && down) src[src_linesize + x + 1] = dither_color(src[src_linesize + x + 1], er, eg, eb, 2, 3);
438 
439  } else if (dither == DITHERING_FLOYD_STEINBERG) {
440  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
441  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
442 
443  if (color < 0)
444  return color;
445  dst[x] = color;
446 
447  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 7, 4);
448  if (left && down) src[src_linesize + x - 1] = dither_color(src[src_linesize + x - 1], er, eg, eb, 3, 4);
449  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 5, 4);
450  if (right && down) src[src_linesize + x + 1] = dither_color(src[src_linesize + x + 1], er, eg, eb, 1, 4);
451 
452  } else if (dither == DITHERING_SIERRA2) {
453  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
454  const int right2 = x < w - 2, left2 = x > x_start + 1;
455  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
456 
457  if (color < 0)
458  return color;
459  dst[x] = color;
460 
461  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 4, 4);
462  if (right2) src[ x + 2] = dither_color(src[ x + 2], er, eg, eb, 3, 4);
463 
464  if (down) {
465  if (left2) src[ src_linesize + x - 2] = dither_color(src[ src_linesize + x - 2], er, eg, eb, 1, 4);
466  if (left) src[ src_linesize + x - 1] = dither_color(src[ src_linesize + x - 1], er, eg, eb, 2, 4);
467  if (1) src[ src_linesize + x ] = dither_color(src[ src_linesize + x ], er, eg, eb, 3, 4);
468  if (right) src[ src_linesize + x + 1] = dither_color(src[ src_linesize + x + 1], er, eg, eb, 2, 4);
469  if (right2) src[ src_linesize + x + 2] = dither_color(src[ src_linesize + x + 2], er, eg, eb, 1, 4);
470  }
471 
472  } else if (dither == DITHERING_SIERRA2_4A) {
473  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
474  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
475 
476  if (color < 0)
477  return color;
478  dst[x] = color;
479 
480  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 2, 2);
481  if (left && down) src[src_linesize + x - 1] = dither_color(src[src_linesize + x - 1], er, eg, eb, 1, 2);
482  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 1, 2);
483 
484  } else {
485  const uint8_t a = src[x] >> 24 & 0xff;
486  const uint8_t r = src[x] >> 16 & 0xff;
487  const uint8_t g = src[x] >> 8 & 0xff;
488  const uint8_t b = src[x] & 0xff;
489  const int color = color_get(s, src[x], a, r, g, b, search_method);
490 
491  if (color < 0)
492  return color;
493  dst[x] = color;
494  }
495  }
496  src += src_linesize;
497  dst += dst_linesize;
498  }
499  return 0;
500 }
501 
502 #define INDENT 4
503 static void disp_node(AVBPrint *buf,
504  const struct color_node *map,
505  int parent_id, int node_id,
506  int depth)
507 {
508  const struct color_node *node = &map[node_id];
509  const uint32_t fontcolor = node->val[1] > 0x50 &&
510  node->val[2] > 0x50 &&
511  node->val[3] > 0x50 ? 0 : 0xffffff;
512  const int rgb_comp = node->split - 1;
513  av_bprintf(buf, "%*cnode%d ["
514  "label=\"%c%02X%c%02X%c%02X%c\" "
515  "fillcolor=\"#%02x%02x%02x\" "
516  "fontcolor=\"#%06"PRIX32"\"]\n",
517  depth*INDENT, ' ', node->palette_id,
518  "[ "[rgb_comp], node->val[1],
519  "][ "[rgb_comp], node->val[2],
520  " ]["[rgb_comp], node->val[3],
521  " ]"[rgb_comp],
522  node->val[1], node->val[2], node->val[3],
523  fontcolor);
524  if (parent_id != -1)
525  av_bprintf(buf, "%*cnode%d -> node%d\n", depth*INDENT, ' ',
526  map[parent_id].palette_id, node->palette_id);
527  if (node->left_id != -1) disp_node(buf, map, node_id, node->left_id, depth + 1);
528  if (node->right_id != -1) disp_node(buf, map, node_id, node->right_id, depth + 1);
529 }
530 
531 // debug_kdtree=kdtree.dot -> dot -Tpng kdtree.dot > kdtree.png
532 static int disp_tree(const struct color_node *node, const char *fname)
533 {
534  AVBPrint buf;
535  FILE *f = av_fopen_utf8(fname, "w");
536 
537  if (!f) {
538  int ret = AVERROR(errno);
539  av_log(NULL, AV_LOG_ERROR, "Cannot open file '%s' for writing: %s\n",
540  fname, av_err2str(ret));
541  return ret;
542  }
543 
544  av_bprint_init(&buf, 0, AV_BPRINT_SIZE_UNLIMITED);
545 
546  av_bprintf(&buf, "digraph {\n");
547  av_bprintf(&buf, " node [style=filled fontsize=10 shape=box]\n");
548  disp_node(&buf, node, -1, 0, 0);
549  av_bprintf(&buf, "}\n");
550 
551  fwrite(buf.str, 1, buf.len, f);
552  fclose(f);
553  av_bprint_finalize(&buf, NULL);
554  return 0;
555 }
556 
557 static int debug_accuracy(const struct color_node *node, const uint32_t *palette, const int trans_thresh,
558  const enum color_search_method search_method)
559 {
560  int r, g, b, ret = 0;
561 
562  for (r = 0; r < 256; r++) {
563  for (g = 0; g < 256; g++) {
564  for (b = 0; b < 256; b++) {
565  const uint8_t argb[] = {0xff, r, g, b};
566  const int r1 = COLORMAP_NEAREST(search_method, palette, node, argb, trans_thresh);
567  const int r2 = colormap_nearest_bruteforce(palette, argb, trans_thresh);
568  if (r1 != r2) {
569  const uint32_t c1 = palette[r1];
570  const uint32_t c2 = palette[r2];
571  const uint8_t palargb1[] = { 0xff, c1>>16 & 0xff, c1>> 8 & 0xff, c1 & 0xff };
572  const uint8_t palargb2[] = { 0xff, c2>>16 & 0xff, c2>> 8 & 0xff, c2 & 0xff };
573  const int d1 = diff(palargb1, argb, trans_thresh);
574  const int d2 = diff(palargb2, argb, trans_thresh);
575  if (d1 != d2) {
577  "/!\\ %02X%02X%02X: %d ! %d (%06"PRIX32" ! %06"PRIX32") / dist: %d ! %d\n",
578  r, g, b, r1, r2, c1 & 0xffffff, c2 & 0xffffff, d1, d2);
579  ret = 1;
580  }
581  }
582  }
583  }
584  }
585  return ret;
586 }
587 
588 struct color {
589  uint32_t value;
590  uint8_t pal_id;
591 };
592 
593 struct color_rect {
594  uint8_t min[3];
595  uint8_t max[3];
596 };
597 
598 typedef int (*cmp_func)(const void *, const void *);
599 
600 #define DECLARE_CMP_FUNC(name, pos) \
601 static int cmp_##name(const void *pa, const void *pb) \
602 { \
603  const struct color *a = pa; \
604  const struct color *b = pb; \
605  return (a->value >> (8 * (3 - (pos))) & 0xff) \
606  - (b->value >> (8 * (3 - (pos))) & 0xff); \
607 }
608 
609 DECLARE_CMP_FUNC(a, 0)
610 DECLARE_CMP_FUNC(r, 1)
611 DECLARE_CMP_FUNC(g, 2)
612 DECLARE_CMP_FUNC(b, 3)
613 
614 static const cmp_func cmp_funcs[] = {cmp_a, cmp_r, cmp_g, cmp_b};
615 
616 static int get_next_color(const uint8_t *color_used, const uint32_t *palette,
617  const int trans_thresh,
618  int *component, const struct color_rect *box)
619 {
620  int wr, wg, wb;
621  int i, longest = 0;
622  unsigned nb_color = 0;
623  struct color_rect ranges;
624  struct color tmp_pal[256];
625  cmp_func cmpf;
626 
627  ranges.min[0] = ranges.min[1] = ranges.min[2] = 0xff;
628  ranges.max[0] = ranges.max[1] = ranges.max[2] = 0x00;
629 
630  for (i = 0; i < AVPALETTE_COUNT; i++) {
631  const uint32_t c = palette[i];
632  const uint8_t a = c >> 24 & 0xff;
633  const uint8_t r = c >> 16 & 0xff;
634  const uint8_t g = c >> 8 & 0xff;
635  const uint8_t b = c & 0xff;
636 
637  if (a < trans_thresh) {
638  continue;
639  }
640 
641  if (color_used[i] || (a != 0xff) ||
642  r < box->min[0] || g < box->min[1] || b < box->min[2] ||
643  r > box->max[0] || g > box->max[1] || b > box->max[2])
644  continue;
645 
646  if (r < ranges.min[0]) ranges.min[0] = r;
647  if (g < ranges.min[1]) ranges.min[1] = g;
648  if (b < ranges.min[2]) ranges.min[2] = b;
649 
650  if (r > ranges.max[0]) ranges.max[0] = r;
651  if (g > ranges.max[1]) ranges.max[1] = g;
652  if (b > ranges.max[2]) ranges.max[2] = b;
653 
654  tmp_pal[nb_color].value = c;
655  tmp_pal[nb_color].pal_id = i;
656 
657  nb_color++;
658  }
659 
660  if (!nb_color)
661  return -1;
662 
663  /* define longest axis that will be the split component */
664  wr = ranges.max[0] - ranges.min[0];
665  wg = ranges.max[1] - ranges.min[1];
666  wb = ranges.max[2] - ranges.min[2];
667  if (wr >= wg && wr >= wb) longest = 1;
668  if (wg >= wr && wg >= wb) longest = 2;
669  if (wb >= wr && wb >= wg) longest = 3;
670  cmpf = cmp_funcs[longest];
671  *component = longest;
672 
673  /* sort along this axis to get median */
674  AV_QSORT(tmp_pal, nb_color, struct color, cmpf);
675 
676  return tmp_pal[nb_color >> 1].pal_id;
677 }
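
A toy sketch (not part of the original file) of the selection step above: restrict to the candidate colors, measure the per-channel range, split along the widest channel, and return the median entry along that axis. Names (struct col, cmp_axis) are illustrative and only R/G/B are considered:

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

struct col { uint8_t r, g, b; };

static int axis; /* 0=r, 1=g, 2=b */
static int cmp_axis(const void *pa, const void *pb)
{
    const struct col *a = pa, *b = pb;
    const int va = axis == 0 ? a->r : axis == 1 ? a->g : a->b;
    const int vb = axis == 0 ? b->r : axis == 1 ? b->g : b->b;
    return va - vb;
}

int main(void)
{
    struct col pal[] = { {10,200,60}, {250,210,70}, {120,190,80}, {60,205,65} };
    const int n = sizeof(pal) / sizeof(pal[0]);
    int minc[3] = {255,255,255}, maxc[3] = {0,0,0}, i;

    for (i = 0; i < n; i++) {
        const uint8_t c[3] = { pal[i].r, pal[i].g, pal[i].b };
        for (int k = 0; k < 3; k++) {
            if (c[k] < minc[k]) minc[k] = c[k];
            if (c[k] > maxc[k]) maxc[k] = c[k];
        }
    }
    /* pick the channel with the largest range as the split axis */
    axis = 0;
    for (i = 1; i < 3; i++)
        if (maxc[i] - minc[i] > maxc[axis] - minc[axis])
            axis = i;

    qsort(pal, n, sizeof(*pal), cmp_axis);
    printf("split axis=%d, median color = (%d,%d,%d)\n",
           axis, pal[n/2].r, pal[n/2].g, pal[n/2].b);
    /* -> split axis=0 (red), median color = (120,190,80) */
    return 0;
}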
678 
679 static int colormap_insert(struct color_node *map,
680  uint8_t *color_used,
681  int *nb_used,
682  const uint32_t *palette,
683  const int trans_thresh,
684  const struct color_rect *box)
685 {
686  uint32_t c;
687  int component, cur_id;
688  int node_left_id = -1, node_right_id = -1;
689  struct color_node *node;
690  struct color_rect box1, box2;
691  const int pal_id = get_next_color(color_used, palette, trans_thresh, &component, box);
692 
693  if (pal_id < 0)
694  return -1;
695 
696  /* create new node with that color */
697  cur_id = (*nb_used)++;
698  c = palette[pal_id];
699  node = &map[cur_id];
700  node->split = component;
701  node->palette_id = pal_id;
702  node->val[0] = c>>24 & 0xff;
703  node->val[1] = c>>16 & 0xff;
704  node->val[2] = c>> 8 & 0xff;
705  node->val[3] = c & 0xff;
706 
707  color_used[pal_id] = 1;
708 
709  /* get the two boxes this node creates */
710  box1 = box2 = *box;
711  box1.max[component-1] = node->val[component];
712  box2.min[component-1] = node->val[component] + 1;
713 
714  node_left_id = colormap_insert(map, color_used, nb_used, palette, trans_thresh, &box1);
715 
716  if (box2.min[component-1] <= box2.max[component-1])
717  node_right_id = colormap_insert(map, color_used, nb_used, palette, trans_thresh, &box2);
718 
719  node->left_id = node_left_id;
720  node->right_id = node_right_id;
721 
722  return cur_id;
723 }
724 
725 static int cmp_pal_entry(const void *a, const void *b)
726 {
727  const int c1 = *(const uint32_t *)a & 0xffffff;
728  const int c2 = *(const uint32_t *)b & 0xffffff;
729  return c1 - c2;
730 }
731 
732 static void load_colormap(PaletteUseContext *s)
733 {
734  int i, nb_used = 0;
735  uint8_t color_used[AVPALETTE_COUNT] = {0};
736  uint32_t last_color = 0;
737  struct color_rect box;
738 
739  /* disable transparent colors and dups */
740  qsort(s->palette, AVPALETTE_COUNT, sizeof(*s->palette), cmp_pal_entry);
741  // update transparency index:
742  if (s->transparency_index >= 0) {
743  for (i = 0; i < AVPALETTE_COUNT; i++) {
744  if ((s->palette[i]>>24 & 0xff) == 0) {
745  s->transparency_index = i; // we are assuming at most one transparent color in palette
746  break;
747  }
748  }
749  }
750 
751  for (i = 0; i < AVPALETTE_COUNT; i++) {
752  const uint32_t c = s->palette[i];
753  if (i != 0 && c == last_color) {
754  color_used[i] = 1;
755  continue;
756  }
757  last_color = c;
758  if (c >> 24 < s->trans_thresh) {
759  color_used[i] = 1; // ignore transparent color(s)
760  continue;
761  }
762  }
763 
764  box.min[0] = box.min[1] = box.min[2] = 0x00;
765  box.max[0] = box.max[1] = box.max[2] = 0xff;
766 
767  colormap_insert(s->map, color_used, &nb_used, s->palette, s->trans_thresh, &box);
768 
769  if (s->dot_filename)
770  disp_tree(s->map, s->dot_filename);
771 
772  if (s->debug_accuracy) {
773  if (!debug_accuracy(s->map, s->palette, s->trans_thresh, s->color_search_method))
774  av_log(NULL, AV_LOG_INFO, "Accuracy check passed\n");
775  }
776 }
777 
778 static void debug_mean_error(PaletteUseContext *s, const AVFrame *in1,
779  const AVFrame *in2, int frame_count)
780 {
781  int x, y;
782  const uint32_t *palette = s->palette;
783  uint32_t *src1 = (uint32_t *)in1->data[0];
784  uint8_t *src2 = in2->data[0];
785  const int src1_linesize = in1->linesize[0] >> 2;
786  const int src2_linesize = in2->linesize[0];
787  const float div = in1->width * in1->height * 3;
788  unsigned mean_err = 0;
789 
790  for (y = 0; y < in1->height; y++) {
791  for (x = 0; x < in1->width; x++) {
792  const uint32_t c1 = src1[x];
793  const uint32_t c2 = palette[src2[x]];
794  const uint8_t argb1[] = {0xff, c1 >> 16 & 0xff, c1 >> 8 & 0xff, c1 & 0xff};
795  const uint8_t argb2[] = {0xff, c2 >> 16 & 0xff, c2 >> 8 & 0xff, c2 & 0xff};
796  mean_err += diff(argb1, argb2, s->trans_thresh);
797  }
798  src1 += src1_linesize;
799  src2 += src2_linesize;
800  }
801 
802  s->total_mean_err += mean_err;
803 
804  av_log(NULL, AV_LOG_INFO, "MEP:%.3f TotalMEP:%.3f\n",
805  mean_err / div, s->total_mean_err / (div * frame_count));
806 }
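
A worked example of the MEP figure printed above (not part of the original file): the accumulated squared per-channel error is divided by width * height * 3, i.e. the number of color components in the frame:

#include <stdio.h>

int main(void)
{
    /* two pixels of a 1x2 frame, per-channel differences (original - palettized) */
    int d1[3] = { 3, -2, 1 };
    int d2[3] = { 0,  4, 0 };
    int err = d1[0]*d1[0] + d1[1]*d1[1] + d1[2]*d1[2]
            + d2[0]*d2[0] + d2[1]*d2[1] + d2[2]*d2[2];
    /* div = width * height * 3, as in debug_mean_error() */
    printf("MEP = %.3f\n", err / (1.0 * 2 * 3)); /* 30 / 6 = 5.000 */
    return 0;
}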
807 
808 static void set_processing_window(enum diff_mode diff_mode,
809  const AVFrame *prv_src, const AVFrame *cur_src,
810  const AVFrame *prv_dst, AVFrame *cur_dst,
811  int *xp, int *yp, int *wp, int *hp)
812 {
813  int x_start = 0, y_start = 0;
814  int width = cur_src->width;
815  int height = cur_src->height;
816 
817  if (prv_src->data[0] && diff_mode == DIFF_MODE_RECTANGLE) {
818  int y;
819  int x_end = cur_src->width - 1,
820  y_end = cur_src->height - 1;
821  const uint32_t *prv_srcp = (const uint32_t *)prv_src->data[0];
822  const uint32_t *cur_srcp = (const uint32_t *)cur_src->data[0];
823  const uint8_t *prv_dstp = prv_dst->data[0];
824  uint8_t *cur_dstp = cur_dst->data[0];
825 
826  const int prv_src_linesize = prv_src->linesize[0] >> 2;
827  const int cur_src_linesize = cur_src->linesize[0] >> 2;
828  const int prv_dst_linesize = prv_dst->linesize[0];
829  const int cur_dst_linesize = cur_dst->linesize[0];
830 
831  /* skip common lines */
832  while (y_start < y_end && !memcmp(prv_srcp + y_start*prv_src_linesize,
833  cur_srcp + y_start*cur_src_linesize,
834  cur_src->width * 4)) {
835  memcpy(cur_dstp + y_start*cur_dst_linesize,
836  prv_dstp + y_start*prv_dst_linesize,
837  cur_dst->width);
838  y_start++;
839  }
840  while (y_end > y_start && !memcmp(prv_srcp + y_end*prv_src_linesize,
841  cur_srcp + y_end*cur_src_linesize,
842  cur_src->width * 4)) {
843  memcpy(cur_dstp + y_end*cur_dst_linesize,
844  prv_dstp + y_end*prv_dst_linesize,
845  cur_dst->width);
846  y_end--;
847  }
848 
849  height = y_end + 1 - y_start;
850 
851  /* skip common columns */
852  while (x_start < x_end) {
853  int same_column = 1;
854  for (y = y_start; y <= y_end; y++) {
855  if (prv_srcp[y*prv_src_linesize + x_start] != cur_srcp[y*cur_src_linesize + x_start]) {
856  same_column = 0;
857  break;
858  }
859  }
860  if (!same_column)
861  break;
862  x_start++;
863  }
864  while (x_end > x_start) {
865  int same_column = 1;
866  for (y = y_start; y <= y_end; y++) {
867  if (prv_srcp[y*prv_src_linesize + x_end] != cur_srcp[y*cur_src_linesize + x_end]) {
868  same_column = 0;
869  break;
870  }
871  }
872  if (!same_column)
873  break;
874  x_end--;
875  }
876  width = x_end + 1 - x_start;
877 
878  if (x_start) {
879  for (y = y_start; y <= y_end; y++)
880  memcpy(cur_dstp + y*cur_dst_linesize,
881  prv_dstp + y*prv_dst_linesize, x_start);
882  }
883  if (x_end != cur_src->width - 1) {
884  const int copy_len = cur_src->width - 1 - x_end;
885  for (y = y_start; y <= y_end; y++)
886  memcpy(cur_dstp + y*cur_dst_linesize + x_end + 1,
887  prv_dstp + y*prv_dst_linesize + x_end + 1,
888  copy_len);
889  }
890  }
891  *xp = x_start;
892  *yp = y_start;
893  *wp = width;
894  *hp = height;
895 }
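
A compact sketch (not part of the original file) of the same bounding-box search on two 4x4 single-byte frames: equal top and bottom rows are skipped first, then equal left and right columns over the remaining rows, leaving the smallest rectangle that actually changed:

#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define W 4
#define H 4

int main(void)
{
    const uint8_t prv[H][W] = { {1,1,1,1}, {1,1,1,1}, {1,1,1,1}, {1,1,1,1} };
    const uint8_t cur[H][W] = { {1,1,1,1}, {1,9,9,1}, {1,1,9,1}, {1,1,1,1} };
    int y0 = 0, y1 = H - 1, x0 = 0, x1 = W - 1, y;

    while (y0 < y1 && !memcmp(prv[y0], cur[y0], W)) y0++;  /* skip equal top rows */
    while (y1 > y0 && !memcmp(prv[y1], cur[y1], W)) y1--;  /* skip equal bottom rows */

    for (; x0 < x1; x0++) {                                /* skip equal left columns */
        int same = 1;
        for (y = y0; y <= y1; y++) same &= prv[y][x0] == cur[y][x0];
        if (!same) break;
    }
    for (; x1 > x0; x1--) {                                /* skip equal right columns */
        int same = 1;
        for (y = y0; y <= y1; y++) same &= prv[y][x1] == cur[y][x1];
        if (!same) break;
    }
    printf("rect: x=%d y=%d w=%d h=%d\n", x0, y0, x1 + 1 - x0, y1 + 1 - y0);
    /* -> rect: x=1 y=1 w=2 h=2 */
    return 0;
}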
896 
897 static int apply_palette(AVFilterLink *inlink, AVFrame *in, AVFrame **outf)
898 {
899  int x, y, w, h, ret;
900  AVFilterContext *ctx = inlink->dst;
901  PaletteUseContext *s = ctx->priv;
902  AVFilterLink *outlink = inlink->dst->outputs[0];
903 
904  AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
905  if (!out) {
906  *outf = NULL;
907  return AVERROR(ENOMEM);
908  }
909  av_frame_copy_props(out, in);
910 
911  set_processing_window(s->diff_mode, s->last_in, in,
912  s->last_out, out, &x, &y, &w, &h);
913  av_frame_unref(s->last_in);
914  av_frame_unref(s->last_out);
915  if (av_frame_ref(s->last_in, in) < 0 ||
916  av_frame_ref(s->last_out, out) < 0 ||
917  av_frame_make_writable(s->last_in) < 0) {
918  av_frame_free(&out);
919  *outf = NULL;
920  return AVERROR(ENOMEM);
921  }
922 
923  ff_dlog(ctx, "%dx%d rect: (%d;%d) -> (%d,%d) [area:%dx%d]\n",
924  w, h, x, y, x+w, y+h, in->width, in->height);
925 
926  ret = s->set_frame(s, out, in, x, y, w, h);
927  if (ret < 0) {
928  av_frame_free(&out);
929  *outf = NULL;
930  return ret;
931  }
932  memcpy(out->data[1], s->palette, AVPALETTE_SIZE);
933  if (s->calc_mean_err)
934  debug_mean_error(s, in, out, inlink->frame_count_out);
935  *outf = out;
936  return 0;
937 }
938 
939 static int config_output(AVFilterLink *outlink)
940 {
941  int ret;
942  AVFilterContext *ctx = outlink->src;
943  PaletteUseContext *s = ctx->priv;
944 
945  ret = ff_framesync_init_dualinput(&s->fs, ctx);
946  if (ret < 0)
947  return ret;
948  s->fs.opt_repeatlast = 1; // only 1 frame in the palette
949  s->fs.in[1].before = s->fs.in[1].after = EXT_INFINITY;
950  s->fs.on_event = load_apply_palette;
951 
952  outlink->w = ctx->inputs[0]->w;
953  outlink->h = ctx->inputs[0]->h;
954 
955  outlink->time_base = ctx->inputs[0]->time_base;
956  if ((ret = ff_framesync_configure(&s->fs)) < 0)
957  return ret;
958  return 0;
959 }
960 
961 static int config_input_palette(AVFilterLink *inlink)
962 {
963  AVFilterContext *ctx = inlink->dst;
964 
965  if (inlink->w * inlink->h != AVPALETTE_COUNT) {
967  "Palette input must contain exactly %d pixels. "
968  "Specified input has %dx%d=%d pixels\n",
969  AVPALETTE_COUNT, inlink->w, inlink->h,
970  inlink->w * inlink->h);
971  return AVERROR(EINVAL);
972  }
973  return 0;
974 }
975 
976 static void load_palette(PaletteUseContext *s, const AVFrame *palette_frame)
977 {
978  int i, x, y;
979  const uint32_t *p = (const uint32_t *)palette_frame->data[0];
980  const int p_linesize = palette_frame->linesize[0] >> 2;
981 
982  s->transparency_index = -1;
983 
984  if (s->new) {
985  memset(s->palette, 0, sizeof(s->palette));
986  memset(s->map, 0, sizeof(s->map));
987  for (i = 0; i < CACHE_SIZE; i++)
988  av_freep(&s->cache[i].entries);
989  memset(s->cache, 0, sizeof(s->cache));
990  }
991 
992  i = 0;
993  for (y = 0; y < palette_frame->height; y++) {
994  for (x = 0; x < palette_frame->width; x++) {
995  s->palette[i] = p[x];
996  if (p[x]>>24 < s->trans_thresh) {
997  s->transparency_index = i; // we are assuming at most one transparent color in palette
998  }
999  i++;
1000  }
1001  p += p_linesize;
1002  }
1003 
1004  load_colormap(s);
1005 
1006  if (!s->new)
1007  s->palette_loaded = 1;
1008 }
1009 
1010 static int load_apply_palette(FFFrameSync *fs)
1011 {
1012  AVFilterContext *ctx = fs->parent;
1013  AVFilterLink *inlink = ctx->inputs[0];
1014  PaletteUseContext *s = ctx->priv;
1015  AVFrame *master, *second, *out = NULL;
1016  int ret;
1017 
1018  // writable for error diffusion dithering
1019  ret = ff_framesync_dualinput_get_writable(fs, &master, &second);
1020  if (ret < 0)
1021  return ret;
1022  if (!master || !second) {
1023  av_frame_free(&master);
1024  return AVERROR_BUG;
1025  }
1026  if (!s->palette_loaded) {
1027  load_palette(s, second);
1028  }
1029  ret = apply_palette(inlink, master, &out);
1030  av_frame_free(&master);
1031  if (ret < 0)
1032  return ret;
1033  return ff_filter_frame(ctx->outputs[0], out);
1034 }
1035 
1036 #define DEFINE_SET_FRAME(color_search, name, value) \
1037 static int set_frame_##name(PaletteUseContext *s, AVFrame *out, AVFrame *in, \
1038  int x_start, int y_start, int w, int h) \
1039 { \
1040  return set_frame(s, out, in, x_start, y_start, w, h, value, color_search); \
1041 }
1042 
1043 #define DEFINE_SET_FRAME_COLOR_SEARCH(color_search, color_search_macro) \
1044  DEFINE_SET_FRAME(color_search_macro, color_search##_##none, DITHERING_NONE) \
1045  DEFINE_SET_FRAME(color_search_macro, color_search##_##bayer, DITHERING_BAYER) \
1046  DEFINE_SET_FRAME(color_search_macro, color_search##_##heckbert, DITHERING_HECKBERT) \
1047  DEFINE_SET_FRAME(color_search_macro, color_search##_##floyd_steinberg, DITHERING_FLOYD_STEINBERG) \
1048  DEFINE_SET_FRAME(color_search_macro, color_search##_##sierra2, DITHERING_SIERRA2) \
1049  DEFINE_SET_FRAME(color_search_macro, color_search##_##sierra2_4a, DITHERING_SIERRA2_4A) \
1050 
1051 DEFINE_SET_FRAME_COLOR_SEARCH(nns_iterative, COLOR_SEARCH_NNS_ITERATIVE)
1052 DEFINE_SET_FRAME_COLOR_SEARCH(nns_recursive, COLOR_SEARCH_NNS_RECURSIVE)
1053 DEFINE_SET_FRAME_COLOR_SEARCH(bruteforce, COLOR_SEARCH_BRUTEFORCE)
1054 
1055 #define DITHERING_ENTRIES(color_search) { \
1056  set_frame_##color_search##_none, \
1057  set_frame_##color_search##_bayer, \
1058  set_frame_##color_search##_heckbert, \
1059  set_frame_##color_search##_floyd_steinberg, \
1060  set_frame_##color_search##_sierra2, \
1061  set_frame_##color_search##_sierra2_4a, \
1062 }
1063 
1064 static const set_frame_func set_frame_lut[NB_COLOR_SEARCHES][NB_DITHERING] = {
1065  DITHERING_ENTRIES(nns_iterative),
1066  DITHERING_ENTRIES(nns_recursive),
1067  DITHERING_ENTRIES(bruteforce),
1068 };
1069 
1070 static int dither_value(int p)
1071 {
1072  const int q = p ^ (p >> 3);
1073  return (p & 4) >> 2 | (q & 4) >> 1 \
1074  | (p & 2) << 1 | (q & 2) << 2 \
1075  | (p & 1) << 4 | (q & 1) << 5;
1076 }
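
A standalone printer (not part of the original file) for the pattern this function generates: indexing it with (y & 7) << 3 | (x & 7), as ordered_dither[] does, yields an 8x8 ordered-dither threshold matrix, a permutation of 0..63, which init() below rescales by bayer_scale and centers around zero:

#include <stdio.h>

static int dither_value(int p)
{
    const int q = p ^ (p >> 3);
    return (p & 4) >> 2 | (q & 4) >> 1
         | (p & 2) << 1 | (q & 2) << 2
         | (p & 1) << 4 | (q & 1) << 5;
}

int main(void)
{
    for (int y = 0; y < 8; y++) {
        for (int x = 0; x < 8; x++)
            printf("%3d", dither_value(y << 3 | x)); /* same indexing as ordered_dither[] */
        printf("\n");
    }
    return 0;
}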
1077 
1078 static av_cold int init(AVFilterContext *ctx)
1079 {
1080  PaletteUseContext *s = ctx->priv;
1081 
1082  s->last_in = av_frame_alloc();
1083  s->last_out = av_frame_alloc();
1084  if (!s->last_in || !s->last_out) {
1085  av_frame_free(&s->last_in);
1086  av_frame_free(&s->last_out);
1087  return AVERROR(ENOMEM);
1088  }
1089 
1090  s->set_frame = set_frame_lut[s->color_search_method][s->dither];
1091 
1092  if (s->dither == DITHERING_BAYER) {
1093  int i;
1094  const int delta = 1 << (5 - s->bayer_scale); // to avoid too much luma
1095 
1096  for (i = 0; i < FF_ARRAY_ELEMS(s->ordered_dither); i++)
1097  s->ordered_dither[i] = (dither_value(i) >> s->bayer_scale) - delta;
1098  }
1099 
1100  return 0;
1101 }
1102 
1103 static int activate(AVFilterContext *ctx)
1104 {
1105  PaletteUseContext *s = ctx->priv;
1106  return ff_framesync_activate(&s->fs);
1107 }
1108 
1109 static av_cold void uninit(AVFilterContext *ctx)
1110 {
1111  int i;
1112  PaletteUseContext *s = ctx->priv;
1113 
1114  ff_framesync_uninit(&s->fs);
1115  for (i = 0; i < CACHE_SIZE; i++)
1116  av_freep(&s->cache[i].entries);
1117  av_frame_free(&s->last_in);
1118  av_frame_free(&s->last_out);
1119 }
1120 
1121 static const AVFilterPad paletteuse_inputs[] = {
1122  {
1123  .name = "default",
1124  .type = AVMEDIA_TYPE_VIDEO,
1125  },{
1126  .name = "palette",
1127  .type = AVMEDIA_TYPE_VIDEO,
1128  .config_props = config_input_palette,
1129  },
1130  { NULL }
1131 };
1132 
1133 static const AVFilterPad paletteuse_outputs[] = {
1134  {
1135  .name = "default",
1136  .type = AVMEDIA_TYPE_VIDEO,
1137  .config_props = config_output,
1138  },
1139  { NULL }
1140 };
1141 
1142 AVFilter ff_vf_paletteuse = {
1143  .name = "paletteuse",
1144  .description = NULL_IF_CONFIG_SMALL("Use a palette to downsample an input video stream."),
1145  .priv_size = sizeof(PaletteUseContext),
1146  .query_formats = query_formats,
1147  .init = init,
1148  .uninit = uninit,
1149  .activate = activate,
1150  .inputs = paletteuse_inputs,
1151  .outputs = paletteuse_outputs,
1152  .priv_class = &paletteuse_class,
1153 };
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
cached_color::color
uint32_t color
Definition: vf_paletteuse.c:69
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:117
AV_BPRINT_SIZE_UNLIMITED
#define AV_BPRINT_SIZE_UNLIMITED
config_input_palette
static int config_input_palette(AVFilterLink *inlink)
Definition: vf_paletteuse.c:961
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
diff
static av_always_inline int diff(const uint8_t *c1, const uint8_t *c2, const int trans_thresh)
Definition: vf_paletteuse.c:169
get_dst_color_err
static av_always_inline int get_dst_color_err(PaletteUseContext *s, uint32_t c, int *er, int *eg, int *eb, const enum color_search_method search_method)
Definition: vf_paletteuse.c:375
colormap_nearest_node
static void colormap_nearest_node(const struct color_node *map, const int node_pos, const uint8_t *target, const int trans_thresh, struct nearest_color *nearest)
Definition: vf_paletteuse.c:215
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_paletteuse.c:139
PaletteUseContext::dot_filename
char * dot_filename
Definition: vf_paletteuse.c:102
r
const char * r
Definition: vf_curves.c:114
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:283
av_bprint_finalize
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
Definition: bprint.c:235
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:293
debug_mean_error
static void debug_mean_error(PaletteUseContext *s, const AVFrame *in1, const AVFrame *in2, int frame_count)
Definition: vf_paletteuse.c:778
out
FILE * out
Definition: movenc.c:54
color
Definition: vf_paletteuse.c:588
av_bprint_init
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
Definition: bprint.c:69
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1080
PaletteUseContext::last_out
AVFrame * last_out
Definition: vf_paletteuse.c:99
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:202
end
static av_cold int end(AVCodecContext *avctx)
Definition: avrndec.c:90
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1078
set_frame_func
int(* set_frame_func)(struct PaletteUseContext *s, AVFrame *out, AVFrame *in, int x_start, int y_start, int width, int height)
Definition: vf_paletteuse.c:80
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:295
av_frame_make_writable
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
Definition: frame.c:611
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1109
AVFrame::width
int width
Definition: frame.h:353
w
uint8_t w
Definition: llviddspenc.c:38
av_dynarray2_add
void * av_dynarray2_add(void **tab_ptr, int *nb_ptr, size_t elem_size, const uint8_t *elem_data)
Add an element of size elem_size to a dynamic array.
Definition: mem.c:322
AVOption
AVOption.
Definition: opt.h:246
b
#define b
Definition: input.c:41
stack_node::dx2
int dx2
Definition: vf_paletteuse.c:255
data
const char data[16]
Definition: mxf.c:91
colormap_nearest_bruteforce
static av_always_inline uint8_t colormap_nearest_bruteforce(const uint32_t *palette, const uint8_t *argb, const int trans_thresh)
Definition: vf_paletteuse.c:185
PaletteUseContext::set_frame
set_frame_func set_frame
Definition: vf_paletteuse.c:94
ff_vf_paletteuse
AVFilter ff_vf_paletteuse
Definition: vf_paletteuse.c:1142
disp_tree
static int disp_tree(const struct color_node *node, const char *fname)
Definition: vf_paletteuse.c:532
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:148
c1
static const uint64_t c1
Definition: murmur3.c:49
FFFrameSync
Frame sync structure.
Definition: framesync.h:146
EXT_INFINITY
@ EXT_INFINITY
Extend the frame to infinity.
Definition: framesync.h:75
hash
uint8_t hash[HASH_SIZE]
Definition: movenc.c:57
PaletteUseContext::palette_loaded
int palette_loaded
Definition: vf_paletteuse.c:91
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:309
colormap_nearest_recursive
static av_always_inline uint8_t colormap_nearest_recursive(const struct color_node *node, const uint8_t *rgb, const int trans_thresh)
Definition: vf_paletteuse.c:246
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
stack_node::color_id
int color_id
Definition: vf_paletteuse.c:254
debug_accuracy
static int debug_accuracy(const struct color_node *node, const uint32_t *palette, const int trans_thresh, const enum color_search_method search_method)
Definition: vf_paletteuse.c:557
DIFF_MODE_NONE
@ DIFF_MODE_NONE
Definition: vf_paletteuse.c:53
NB_DITHERING
@ NB_DITHERING
Definition: vf_paletteuse.c:42
dither_value
static int dither_value(int p)
Definition: vf_paletteuse.c:1070
COLOR_SEARCH_BRUTEFORCE
@ COLOR_SEARCH_BRUTEFORCE
Definition: vf_paletteuse.c:48
src
#define src
Definition: vp8dsp.c:254
apply_palette
static int apply_palette(AVFilterLink *inlink, AVFrame *in, AVFrame **outf)
Definition: vf_paletteuse.c:897
PaletteUseContext::cache
struct cache_node cache[CACHE_SIZE]
Definition: vf_paletteuse.c:86
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
colormap_insert
static int colormap_insert(struct color_node *map, uint8_t *color_used, int *nb_used, const uint32_t *palette, const int trans_thresh, const struct color_rect *box)
Definition: vf_paletteuse.c:679
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:189
PaletteUseContext::ordered_dither
int ordered_dither[8 *8]
Definition: vf_paletteuse.c:96
colormap_nearest_iterative
static av_always_inline uint8_t colormap_nearest_iterative(const struct color_node *root, const uint8_t *target, const int trans_thresh)
Definition: vf_paletteuse.c:258
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
set_processing_window
static void set_processing_window(enum diff_mode diff_mode, const AVFrame *prv_src, const AVFrame *cur_src, const AVFrame *prv_dst, AVFrame *cur_dst, int *xp, int *yp, int *wp, int *hp)
Definition: vf_paletteuse.c:808
buf
void * buf
Definition: avisynth_c.h:766
av_cold
#define av_cold
Definition: attributes.h:84
av_fopen_utf8
FILE * av_fopen_utf8(const char *path, const char *mode)
Open a file using a UTF-8 filename.
Definition: file_open.c:158
INDENT
#define INDENT
Definition: vf_paletteuse.c:502
color_rect
Definition: vf_paletteuse.c:593
DEFINE_SET_FRAME_COLOR_SEARCH
#define DEFINE_SET_FRAME_COLOR_SEARCH(color_search, color_search_macro)
Definition: vf_paletteuse.c:1043
PaletteUseContext::bayer_scale
int bayer_scale
Definition: vf_paletteuse.c:95
width
#define width
s
#define s(width, name)
Definition: cbs_vp9.c:257
dithering_mode
dithering_mode
Definition: vf_paletteuse.c:35
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_paletteuse.c:939
g
const char * g
Definition: vf_curves.c:115
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:440
color_node::right_id
int right_id
Definition: vf_paletteuse.c:62
DITHERING_HECKBERT
@ DITHERING_HECKBERT
Definition: vf_paletteuse.c:38
stack_node
Definition: vf_paletteuse.c:253
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
filters.h
nearest_color::dist_sqd
int dist_sqd
Definition: vf_paletteuse.c:212
ctx
AVFormatContext * ctx
Definition: movenc.c:48
set_frame_lut
static const set_frame_func set_frame_lut[NB_COLOR_SEARCHES][NB_DITHERING]
Definition: vf_paletteuse.c:1064
color_rect::max
uint8_t max[3]
Definition: vf_paletteuse.c:595
f
#define f(width, name)
Definition: cbs_vp9.c:255
if
if(ret)
Definition: filter_design.txt:179
color_node::palette_id
uint8_t palette_id
Definition: vf_paletteuse.c:60
load_apply_palette
static int load_apply_palette(FFFrameSync *fs)
Definition: vf_paletteuse.c:1010
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
NULL
#define NULL
Definition: coverity.c:32
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:654
PaletteUseContext::dither
int dither
Definition: vf_paletteuse.c:92
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:259
AVPALETTE_SIZE
#define AVPALETTE_SIZE
Definition: pixfmt.h:32
COLORMAP_NEAREST
#define COLORMAP_NEAREST(search, palette, root, target, trans_thresh)
Definition: vf_paletteuse.c:330
get_next_color
static int get_next_color(const uint8_t *color_used, const uint32_t *palette, const int trans_thresh, int *component, const struct color_rect *box)
Definition: vf_paletteuse.c:616
DITHERING_ENTRIES
#define DITHERING_ENTRIES(color_search)
Definition: vf_paletteuse.c:1055
inputs
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
Definition: filter_design.txt:243
PaletteUseContext
Definition: vf_paletteuse.c:83
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
cmp_funcs
static const cmp_func cmp_funcs[]
Definition: vf_paletteuse.c:614
AVPALETTE_COUNT
#define AVPALETTE_COUNT
Definition: pixfmt.h:33
disp_node
static void disp_node(AVBPrint *buf, const struct color_node *map, int parent_id, int node_id, int depth)
Definition: vf_paletteuse.c:503
DITHERING_NONE
@ DITHERING_NONE
Definition: vf_paletteuse.c:36
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
paletteuse_options
static const AVOption paletteuse_options[]
Definition: vf_paletteuse.c:111
ff_dlog
#define ff_dlog(a,...)
Definition: tableprint_vlc.h:29
PaletteUseContext::trans_thresh
int trans_thresh
Definition: vf_paletteuse.c:90
qsort.h
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:188
ff_framesync_init_dualinput
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
Initialize a frame sync structure for dualinput.
Definition: framesync.c:361
master
const char * master
Definition: vf_curves.c:117
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:443
nearest_color
Definition: vf_paletteuse.c:210
DITHERING_BAYER
@ DITHERING_BAYER
Definition: vf_paletteuse.c:37
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:119
DITHERING_FLOYD_STEINBERG
@ DITHERING_FLOYD_STEINBERG
Definition: vf_paletteuse.c:39
PaletteUseContext::palette
uint32_t palette[AVPALETTE_COUNT]
Definition: vf_paletteuse.c:88
color
static const uint32_t color[16+AV_CLASS_CATEGORY_NB]
Definition: log.c:92
DITHERING_SIERRA2
@ DITHERING_SIERRA2
Definition: vf_paletteuse.c:40
PaletteUseContext::fs
FFFrameSync fs
Definition: vf_paletteuse.c:85
split
static char * split(char *message, char delim)
Definition: af_channelmap.c:81
height
#define height
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:360
a
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:41
color_get
static av_always_inline int color_get(PaletteUseContext *s, uint32_t color, uint8_t a, uint8_t r, uint8_t g, uint8_t b, const enum color_search_method search_method)
Check if the requested color is in the cache already.
Definition: vf_paletteuse.c:341
DITHERING_SIERRA2_4A
@ DITHERING_SIERRA2_4A
Definition: vf_paletteuse.c:41
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:187
PaletteUseContext::transparency_index
int transparency_index
Definition: vf_paletteuse.c:89
internal.h
DECLARE_CMP_FUNC
#define DECLARE_CMP_FUNC(name, pos)
Definition: vf_paletteuse.c:600
activate
static int activate(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1103
src1
#define src1
Definition: h264pred.c:139
OFFSET
#define OFFSET(x)
Definition: vf_paletteuse.c:109
in
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(const int16_t *) pi >> 8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(const int32_t *) pi >> 24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31)))) #define SET_CONV_FUNC_GROUP(ofmt, ifmt) static void set_generic_function(AudioConvert *ac) { } void ff_audio_convert_free(AudioConvert **ac) { if(! *ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);} AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, int sample_rate, int apply_map) { AudioConvert *ac;int in_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) return NULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method !=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt) > 2) { ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc) { av_free(ac);return NULL;} return ac;} in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar) { ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar ? ac->channels :1;} else if(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;else ac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);return ac;} int ff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in) { int use_generic=1;int len=in->nb_samples;int p;if(ac->dc) { av_log(ac->avr, AV_LOG_TRACE, "%d samples - audio_convert: %s to %s (dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));return ff_convert_dither(ac-> in
Definition: audio_convert.c:326
bprint.h
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:259
cache_node
Definition: vf_paletteuse.c:73
AV_QSORT
#define AV_QSORT(p, num, type, cmp)
Quicksort This sort is fast, and fully inplace but not stable and it is possible to construct input t...
Definition: qsort.h:33
internal.h
cmp_pal_entry
static int cmp_pal_entry(const void *a, const void *b)
Definition: vf_paletteuse.c:725
delta
float delta
Definition: vorbis_enc_data.h:457
av_always_inline
#define av_always_inline
Definition: attributes.h:43
cache_node::entries
struct cached_color * entries
Definition: vf_paletteuse.c:74
uint8_t
uint8_t
Definition: audio_convert.c:194
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:553
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:60
PaletteUseContext::diff_mode
int diff_mode
Definition: vf_paletteuse.c:97
color_node::split
int split
Definition: vf_paletteuse.c:61
cached_color::pal_entry
uint8_t pal_entry
Definition: vf_paletteuse.c:70
load_colormap
static void load_colormap(PaletteUseContext *s)
Definition: vf_paletteuse.c:732
PaletteUseContext::total_mean_err
uint64_t total_mean_err
Definition: vf_paletteuse.c:105
diff_mode
diff_mode
Definition: vf_paletteuse.c:52
FLAGS
#define FLAGS
Definition: vf_paletteuse.c:110
AVFilter
Filter definition.
Definition: avfilter.h:144
cache_node::nb_entries
int nb_entries
Definition: vf_paletteuse.c:75
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
ret
ret
Definition: filter_design.txt:187
PaletteUseContext::color_search_method
int color_search_method
Definition: vf_paletteuse.c:103
av_bprintf
void av_bprintf(AVBPrint *buf, const char *fmt,...)
Definition: bprint.c:94
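A minimal AVBPrint sketch, illustrative only; the debug paths of this filter may format their output differently:

#include "libavutil/bprint.h"
#include "libavutil/log.h"

static void log_example(void *log_ctx)
{
    AVBPrint buf;
    av_bprint_init(&buf, 0, AV_BPRINT_SIZE_UNLIMITED);  /* dynamically grown buffer */
    av_bprintf(&buf, "palette of %d colors", 256);
    av_log(log_ctx, AV_LOG_DEBUG, "%s\n", buf.str);
    av_bprint_finalize(&buf, NULL);                     /* free any heap storage */
}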
set_frame
static av_always_inline int set_frame(PaletteUseContext *s, AVFrame *out, AVFrame *in, int x_start, int y_start, int w, int h, enum dithering_mode dither, const enum color_search_method search_method)
Definition: vf_paletteuse.c:394
left
Definition: snow.txt:386
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen_template.c:38
NBITS
#define NBITS
Definition: vf_paletteuse.c:65
AVFrame::height
int height
Definition: frame.h:353
c2
static const uint64_t c2
Definition: murmur3.c:50
framesync.h
DIFF_MODE_RECTANGLE
@ DIFF_MODE_RECTANGLE
Definition: vf_paletteuse.c:54
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
COLOR_SEARCH_NNS_ITERATIVE
@ COLOR_SEARCH_NNS_ITERATIVE
Definition: vf_paletteuse.c:46
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:223
avfilter.h
cmp_func
int(* cmp_func)(const void *, const void *)
Definition: vf_paletteuse.c:598
PaletteUseContext::map
struct color_node map[AVPALETTE_COUNT]
Definition: vf_paletteuse.c:87
COLOR_SEARCH_NNS_RECURSIVE
@ COLOR_SEARCH_NNS_RECURSIVE
Definition: vf_paletteuse.c:47
PaletteUseContext::debug_accuracy
int debug_accuracy
Definition: vf_paletteuse.c:106
AVFilterContext
An instance of a filter.
Definition: avfilter.h:338
shift
static int shift(int a, int b)
Definition: sonic.c:82
color_node::val
uint8_t val[4]
Definition: vf_paletteuse.c:59
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
CACHE_SIZE
#define CACHE_SIZE
Definition: vf_paletteuse.c:66
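CACHE_SIZE being 1<<(3*NBITS) suggests the lookup cache is keyed by NBITS bits per RGB channel. The sketch below only illustrates that sizing; the exact bit selection used by the filter may differ.

#include <stdint.h>

static unsigned cache_key(uint8_t r, uint8_t g, uint8_t b)
{
    const unsigned mask = (1 << NBITS) - 1;   /* keep NBITS bits per channel */
    return (r & mask) << (2 * NBITS) |        /* 3*NBITS-bit key, always < CACHE_SIZE */
           (g & mask) <<      NBITS  |
           (b & mask);
}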
map
const VDPAUPixFmtMap * map
Definition: hwcontext_vdpau.c:85
color::pal_id
uint8_t pal_id
Definition: vf_paletteuse.c:590
NB_COLOR_SEARCHES
@ NB_COLOR_SEARCHES
Definition: vf_paletteuse.c:49
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:240
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
color::value
uint32_t value
Definition: vf_paletteuse.c:589
paletteuse_outputs
static const AVFilterPad paletteuse_outputs[]
Definition: vf_paletteuse.c:1133
PaletteUseContext::calc_mean_err
int calc_mean_err
Definition: vf_paletteuse.c:104
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:50
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:326
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
color_rect::min
uint8_t min[3]
Definition: vf_paletteuse.c:594
h
h
Definition: vp9dsp_template.c:2038
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:344
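In framesync-based filters the activate callback usually just delegates to ff_framesync_activate(); assuming the context stores its FFFrameSync in a field named fs, the pattern looks like:

static int activate(AVFilterContext *ctx)
{
    PaletteUseContext *s = ctx->priv;
    return ff_framesync_activate(&s->fs);   /* let framesync drive input/output */
}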
color_node::left_id
int left_id
Definition: vf_paletteuse.c:62
color_node
Definition: vf_paletteuse.c:58
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(paletteuse)
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:227
ff_framesync_dualinput_get_writable
int ff_framesync_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
Same as ff_framesync_dualinput_get(), but make sure that f0 is writable.
Definition: framesync.c:399
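A hedged sketch of a dual-input framesync handler: the main frame is requested writable so it can be converted in place, while the secondary frame (here, the palette) is only read. The function name is an illustrative stand-in.

static int process_frame(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *main_frame, *palette_frame;
    int ret = ff_framesync_dualinput_get_writable(fs, &main_frame, &palette_frame);
    if (ret < 0)
        return ret;
    /* ... convert main_frame using palette_frame, then forward it
     * downstream with ff_filter_frame(ctx->outputs[0], ...) ... */
    return ret;
}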
dither_color
static av_always_inline int dither_color(uint32_t px, int er, int eg, int eb, int scale, int shift)
Definition: vf_paletteuse.c:161
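dither_color() is the per-pixel helper used when diffusing quantization error; a hedged sketch of what such a helper computes (the exact arithmetic in the filter may differ):

#include <stdint.h>
#include "libavutil/common.h"   /* av_clip_uint8() */

static uint32_t add_scaled_error(uint32_t px, int er, int eg, int eb,
                                 int scale, int shift)
{
    /* add the scaled error to each 8-bit channel of a packed 0xRRGGBB pixel,
     * clipping the result to [0, 255] */
    const int r = av_clip_uint8((px >> 16 & 0xff) + ((er * scale) / (1 << shift)));
    const int g = av_clip_uint8((px >>  8 & 0xff) + ((eg * scale) / (1 << shift)));
    const int b = av_clip_uint8((px       & 0xff) + ((eb * scale) / (1 << shift)));
    return r << 16 | g << 8 | b;
}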
int
int
Definition: ffmpeg_filter.c:191
PaletteUseContext::last_in
AVFrame * last_in
Definition: vf_paletteuse.c:98
nearest_color::node_pos
int node_pos
Definition: vf_paletteuse.c:211
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:232
paletteuse_inputs
static const AVFilterPad paletteuse_inputs[]
Definition: vf_paletteuse.c:1121
load_palette
static void load_palette(PaletteUseContext *s, const AVFrame *palette_frame)
Definition: vf_paletteuse.c:976
cached_color
Definition: vf_paletteuse.c:68
color_search_method
color_search_method
Definition: vf_paletteuse.c:45
min
float min
Definition: vorbis_enc_data.h:456
NB_DIFF_MODE
@ NB_DIFF_MODE
Definition: vf_paletteuse.c:55
dither
static const uint8_t dither[8][8]
Definition: vf_fspp.c:57