/*
 * PNG image format
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avcodec.h"
#include "codec_internal.h"
#include "encode.h"
#include "exif_internal.h"
#include "bytestream.h"
#include "lossless_videoencdsp.h"
#include "png.h"
#include "apng.h"
#include "zlib_wrapper.h"

#include "libavutil/avassert.h"
#include "libavutil/buffer.h"
#include "libavutil/crc.h"
#include "libavutil/csp.h"
#include "libavutil/libm.h"
#include "libavutil/mastering_display_metadata.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/rational.h"
#include "libavutil/stereo3d.h"

#include <zlib.h>

#define IOBUF_SIZE 4096

typedef struct APNGFctlChunk {
    uint32_t sequence_number;
    uint32_t width, height;
    uint32_t x_offset, y_offset;
    uint16_t delay_num, delay_den;
    uint8_t dispose_op, blend_op;
} APNGFctlChunk;

typedef struct PNGEncContext {
    AVClass *class;
    LLVidEncDSPContext llvidencdsp;

    uint8_t *bytestream;
    uint8_t *bytestream_start;
    uint8_t *bytestream_end;

    int filter_type;

    FFZStream zstream;
    uint8_t buf[IOBUF_SIZE];
    int dpi;                     ///< Physical pixel density, in dots per inch, if set
    int dpm;                     ///< Physical pixel density, in dots per meter, if set

    int is_progressive;
    int bit_depth;
    int color_type;
    int bits_per_pixel;

    // APNG
    uint32_t palette_checksum;   // Used to ensure a single unique palette
    uint32_t sequence_number;
    int extra_data_updated;
    uint8_t *extra_data;
    int extra_data_size;

    AVFrame *prev_frame;
    AVFrame *last_frame;
    APNGFctlChunk last_frame_fctl;
    uint8_t *last_frame_packet;
    size_t last_frame_packet_size;
} PNGEncContext;

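/*
 * Extract the pixels belonging to one Adam7 interlacing pass from a full
 * source row. Each entry of masks[] below selects, per 8-pixel column group,
 * which pixels are part of the pass; the selected pixels are packed
 * contiguously into dst (bit-packed for 1 bpp, bytewise otherwise).
 */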
static void png_get_interlaced_row(uint8_t *dst, int row_size,
                                   int bits_per_pixel, int pass,
                                   const uint8_t *src, int width)
{
    int x, mask, dst_x, j, b, bpp;
    uint8_t *d;
    const uint8_t *s;
    static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};

    mask = masks[pass];
    switch (bits_per_pixel) {
    case 1:
        memset(dst, 0, row_size);
        dst_x = 0;
        for (x = 0; x < width; x++) {
            j = (x & 7);
            if ((mask << j) & 0x80) {
                b = (src[x >> 3] >> (7 - j)) & 1;
                dst[dst_x >> 3] |= b << (7 - (dst_x & 7));
                dst_x++;
            }
        }
        break;
    default:
        bpp = bits_per_pixel >> 3;
        d   = dst;
        s   = src;
        for (x = 0; x < width; x++) {
            j = x & 7;
            if ((mask << j) & 0x80) {
                memcpy(d, s, bpp);
                d += bpp;
            }
            s += bpp;
        }
        break;
    }
}

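/*
 * PNG Paeth filtering: for each byte the predictor is whichever of left (a),
 * above (b) or upper-left (c) lies closest to p = a + b - c (ties prefer a,
 * then b), and the residual src[i] - predictor is stored. For example, with
 * a=100, b=90, c=80 we get p=110, so a is chosen and src[i] - 100 is coded.
 */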
static void sub_png_paeth_prediction(uint8_t *dst, const uint8_t *src, const uint8_t *top,
                                     int w, int bpp)
{
    int i;
    for (i = 0; i < w; i++) {
        int a, b, c, p, pa, pb, pc;

        a = src[i - bpp];
        b = top[i];
        c = top[i - bpp];

        p  = b - c;
        pc = a - c;

        pa = abs(p);
        pb = abs(pc);
        pc = abs(p + pc);

        if (pa <= pb && pa <= pc)
            p = a;
        else if (pb <= pc)
            p = b;
        else
            p = c;
        dst[i] = src[i] - p;
    }
}

static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
{
    const uint8_t *src1 = src + bpp;
    const uint8_t *src2 = src;
    int x, unaligned_w;

    memcpy(dst, src, bpp);
    dst  += bpp;
    size -= bpp;
    unaligned_w = FFMIN(32 - bpp, size);
    for (x = 0; x < unaligned_w; x++)
        *dst++ = *src1++ - *src2++;
    size -= unaligned_w;
    c->llvidencdsp.diff_bytes(dst, src1, src2, size);
}

static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
                           const uint8_t *src, const uint8_t *top, int size, int bpp)
{
    int i;

    switch (filter_type) {
    case PNG_FILTER_VALUE_NONE:
        memcpy(dst, src, size);
        break;
    case PNG_FILTER_VALUE_SUB:
        sub_left_prediction(c, dst, src, bpp, size);
        break;
    case PNG_FILTER_VALUE_UP:
        c->llvidencdsp.diff_bytes(dst, src, top, size);
        break;
    case PNG_FILTER_VALUE_AVG:
        for (i = 0; i < bpp; i++)
            dst[i] = src[i] - (top[i] >> 1);
        for (; i < size; i++)
            dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
        break;
    case PNG_FILTER_VALUE_PAETH:
        for (i = 0; i < bpp; i++)
            dst[i] = src[i] - top[i];
        sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
        break;
    default:
        av_unreachable("PNG_FILTER_VALUE_MIXED can't happen here and all others are covered");
    }
}

static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
                                  const uint8_t *src, const uint8_t *top, int size, int bpp)
{
    int pred = s->filter_type;
    av_assert0(bpp || !pred);
    if (!top && pred)
        pred = PNG_FILTER_VALUE_SUB;
    if (pred == PNG_FILTER_VALUE_MIXED) {
        int i;
        int cost, bcost = INT_MAX;
        uint8_t *buf1 = dst, *buf2 = dst + size + 16;
        for (pred = 0; pred < 5; pred++) {
            png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
            buf1[0] = pred;
            cost = 0;
            for (i = 0; i <= size; i++)
                cost += abs((int8_t) buf1[i]);
            if (cost < bcost) {
                bcost = cost;
                FFSWAP(uint8_t *, buf1, buf2);
            }
        }
        return buf2;
    } else {
        png_filter_row(s, dst + 1, pred, src, top, size, bpp);
        dst[0] = pred;
        return dst;
    }
}

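/*
 * Emit one PNG chunk: 4-byte big-endian length, 4-byte chunk type, payload,
 * then a CRC-32 computed over the type and payload (the length is excluded).
 */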
static void png_write_chunk(uint8_t **f, uint32_t tag,
                            const uint8_t *buf, int length)
{
    const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
    uint32_t crc = ~0U;
    uint8_t tagbuf[4];

    bytestream_put_be32(f, length);
    AV_WL32(tagbuf, tag);
    crc = av_crc(crc_table, crc, tagbuf, 4);
    bytestream_put_be32(f, av_bswap32(tag));
    if (length > 0) {
        crc = av_crc(crc_table, crc, buf, length);
        if (*f != buf)
            memcpy(*f, buf, length);
        *f += length;
    }
    bytestream_put_be32(f, ~crc);
}

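/*
 * Write compressed image data. Plain PNG (and the first APNG frame) uses a
 * regular IDAT chunk; subsequent APNG frames use fdAT chunks, which carry a
 * 4-byte sequence number before the data, hence the manual CRC handling
 * instead of png_write_chunk().
 */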
static void png_write_image_data(AVCodecContext *avctx,
                                 const uint8_t *buf, int length)
{
    PNGEncContext *s = avctx->priv_data;
    const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
    uint32_t crc = ~0U;

    if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_num == 0) {
        png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
        return;
    }

    bytestream_put_be32(&s->bytestream, length + 4);

    bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
    bytestream_put_be32(&s->bytestream, s->sequence_number);
    crc = av_crc(crc_table, crc, s->bytestream - 8, 8);

    crc = av_crc(crc_table, crc, buf, length);
    memcpy(s->bytestream, buf, length);
    s->bytestream += length;

    bytestream_put_be32(&s->bytestream, ~crc);

    ++s->sequence_number;
}

/* XXX: do filtering */
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
{
    PNGEncContext *s = avctx->priv_data;
    z_stream *const zstream = &s->zstream.zstream;
    int ret;

    zstream->avail_in = size;
    zstream->next_in  = data;
    while (zstream->avail_in > 0) {
        ret = deflate(zstream, Z_NO_FLUSH);
        if (ret != Z_OK)
            return -1;
        if (zstream->avail_out == 0) {
            if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
                png_write_image_data(avctx, s->buf, IOBUF_SIZE);
            zstream->avail_out = IOBUF_SIZE;
            zstream->next_out  = s->buf;
        }
    }
    return 0;
}

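/*
 * cHRM stores CIE xy chromaticities and gAMA stores 1/gamma, both as
 * integers scaled by 100000 (e.g. a display gamma of 2.2 is written as 45455).
 */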
#define PNG_LRINT(d, divisor) lrint((d) * (divisor))
#define PNG_Q2D(q, divisor) PNG_LRINT(av_q2d(q), (divisor))
#define AV_WB32_PNG_D(buf, q) AV_WB32(buf, PNG_Q2D(q, 100000))
static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
{
    const AVColorPrimariesDesc *desc = av_csp_primaries_desc_from_id(prim);
    if (!desc)
        return 0;

    AV_WB32_PNG_D(buf, desc->wp.x);
    AV_WB32_PNG_D(buf + 4, desc->wp.y);
    AV_WB32_PNG_D(buf + 8, desc->prim.r.x);
    AV_WB32_PNG_D(buf + 12, desc->prim.r.y);
    AV_WB32_PNG_D(buf + 16, desc->prim.g.x);
    AV_WB32_PNG_D(buf + 20, desc->prim.g.y);
    AV_WB32_PNG_D(buf + 24, desc->prim.b.x);
    AV_WB32_PNG_D(buf + 28, desc->prim.b.y);

    return 1;
}

static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
{
    double gamma = av_csp_approximate_trc_gamma(trc);
    if (gamma <= 1e-6)
        return 0;

    AV_WB32(buf, PNG_LRINT(1.0 / gamma, 100000));
    return 1;
}

static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
{
    z_stream *const zstream = &s->zstream.zstream;
    const AVDictionaryEntry *entry;
    const char *name;
    uint8_t *start, *buf;
    int ret;

    if (!sd || !sd->size)
        return 0;
    zstream->next_in  = sd->data;
    zstream->avail_in = sd->size;

    /* write the chunk contents first */
    start = s->bytestream + 8; /* make room for iCCP tag + length */
    buf = start;

    /* profile description */
    entry = av_dict_get(sd->metadata, "name", NULL, 0);
    name = (entry && entry->value[0]) ? entry->value : "icc";
    for (int i = 0;; i++) {
        char c = (i == 79) ? 0 : name[i];
        bytestream_put_byte(&buf, c);
        if (!c)
            break;
    }

    /* compression method and profile data */
    bytestream_put_byte(&buf, 0);
    zstream->next_out  = buf;
    zstream->avail_out = s->bytestream_end - buf;
    ret = deflate(zstream, Z_FINISH);
    deflateReset(zstream);
    if (ret != Z_STREAM_END)
        return AVERROR_EXTERNAL;

    /* rewind to the start and write the chunk header/crc */
    png_write_chunk(&s->bytestream, MKTAG('i', 'C', 'C', 'P'), start,
                    zstream->next_out - start);
    return 0;
}

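/*
 * Write everything that precedes the image data: IHDR and pHYs, optional
 * sTER/eXIf/iCCP chunks, colorspace signalling (sRGB or cICP, plus cLLI,
 * mDCV, cHRM, gAMA and sBIT where applicable) and, for palette formats,
 * PLTE and tRNS.
 */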
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
{
    AVFrameSideData *side_data;
    PNGEncContext *s = avctx->priv_data;
    AVBufferRef *exif_data = NULL;
    int ret;

    /* write png header */
    AV_WB32(s->buf, avctx->width);
    AV_WB32(s->buf + 4, avctx->height);
    s->buf[8]  = s->bit_depth;
    s->buf[9]  = s->color_type;
    s->buf[10] = 0; /* compression type */
    s->buf[11] = 0; /* filter type */
    s->buf[12] = s->is_progressive; /* interlace type */
    png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);

    /* write physical information */
    if (s->dpm) {
        AV_WB32(s->buf, s->dpm);
        AV_WB32(s->buf + 4, s->dpm);
        s->buf[8] = 1; /* unit specifier is meter */
    } else {
        AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
        AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
        s->buf[8] = 0; /* unit specifier is unknown */
    }
    png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);

    /* write stereoscopic information */
    side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_STEREO3D);
    if (side_data) {
        AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
        switch (stereo3d->type) {
        case AV_STEREO3D_SIDEBYSIDE:
            s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
            png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
            break;
        case AV_STEREO3D_2D:
            break;
        default:
            av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
            break;
        }
    }

    ret = ff_exif_get_buffer(avctx, pict, &exif_data, AV_EXIF_TIFF_HEADER);
    if (exif_data) {
        // png_write_chunk accepts an int, not a size_t, so we have to check overflow
        if (exif_data->size > INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE)
            // that's a very big exif chunk, probably a bug
            av_log(avctx, AV_LOG_ERROR, "extremely large EXIF buffer detected, not writing\n");
        else
            png_write_chunk(&s->bytestream, MKTAG('e','X','I','f'), exif_data->data, exif_data->size);
        av_buffer_unref(&exif_data);
    } else if (ret < 0) {
        av_log(avctx, AV_LOG_WARNING, "unable to attach EXIF metadata: %s\n", av_err2str(ret));
    }

    side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
    if ((ret = png_write_iccp(s, side_data)))
        return ret;

    /* write colorspace information */
    if (pict->color_primaries == AVCOL_PRI_BT709 &&
        pict->color_trc == AVCOL_TRC_IEC61966_2_1) {
        s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
        png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
    } else if (pict->color_trc != AVCOL_TRC_UNSPECIFIED && !side_data) {
        /*
         * Avoid writing cICP if the transfer is unknown. Known primaries
         * with unknown transfer can be handled by cHRM.
         *
         * We also avoid writing cICP if an ICC Profile is present, because
         * the standard requires that cICP overrides iCCP.
         *
         * These values match H.273 so no translation is needed.
         */
        s->buf[0] = pict->color_primaries;
        s->buf[1] = pict->color_trc;
        s->buf[2] = 0; /* colorspace = RGB */
        s->buf[3] = pict->color_range == AVCOL_RANGE_MPEG ? 0 : 1;
        png_write_chunk(&s->bytestream, MKTAG('c', 'I', 'C', 'P'), s->buf, 4);
    }

    side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
    if (side_data) {
        AVContentLightMetadata *clli = (AVContentLightMetadata *) side_data->data;
        AV_WB32(s->buf, clli->MaxCLL * 10000);
        AV_WB32(s->buf + 4, clli->MaxFALL * 10000);
        png_write_chunk(&s->bytestream, MKTAG('c', 'L', 'L', 'I'), s->buf, 8);
    }

    side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
    if (side_data) {
        AVMasteringDisplayMetadata *mdcv = (AVMasteringDisplayMetadata *) side_data->data;
        if (mdcv->has_luminance && mdcv->has_primaries) {
            for (int i = 0; i < 3; i++) {
                AV_WB16(s->buf + 2*i, PNG_Q2D(mdcv->display_primaries[i][0], 50000));
                AV_WB16(s->buf + 2*i + 2, PNG_Q2D(mdcv->display_primaries[i][1], 50000));
            }
            AV_WB16(s->buf + 12, PNG_Q2D(mdcv->white_point[0], 50000));
            AV_WB16(s->buf + 14, PNG_Q2D(mdcv->white_point[1], 50000));
            AV_WB32(s->buf + 16, PNG_Q2D(mdcv->max_luminance, 10000));
            AV_WB32(s->buf + 20, PNG_Q2D(mdcv->min_luminance, 10000));
            png_write_chunk(&s->bytestream, MKTAG('m', 'D', 'C', 'V'), s->buf, 24);
        }
    }

    if (png_get_chrm(pict->color_primaries, s->buf))
        png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
    if (png_get_gama(pict->color_trc, s->buf))
        png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);

    if (avctx->bits_per_raw_sample > 0 &&
        avctx->bits_per_raw_sample < (s->color_type & PNG_COLOR_MASK_PALETTE ? 8 : s->bit_depth)) {
        int len = s->color_type & PNG_COLOR_MASK_PALETTE ? 3 : ff_png_get_nb_channels(s->color_type);
        memset(s->buf, avctx->bits_per_raw_sample, len);
        png_write_chunk(&s->bytestream, MKTAG('s', 'B', 'I', 'T'), s->buf, len);
    }

    /* put the palette if needed, must be after colorspace information */
    if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
        int has_alpha, alpha, i;
        unsigned int v;
        uint32_t *palette;
        uint8_t *ptr, *alpha_ptr;

        palette   = (uint32_t *)pict->data[1];
        ptr       = s->buf;
        alpha_ptr = s->buf + 256 * 3;
        has_alpha = 0;
        for (i = 0; i < 256; i++) {
            v = palette[i];
            alpha = v >> 24;
            if (alpha != 0xff)
                has_alpha = 1;
            *alpha_ptr++ = alpha;
            bytestream_put_be24(&ptr, v);
        }
        png_write_chunk(&s->bytestream,
                        MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
        if (has_alpha) {
            png_write_chunk(&s->bytestream,
                            MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
        }
    }

    return 0;
}

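/*
 * Filter and compress one picture: every row is prefixed with its filter-type
 * byte, run through png_choose_filter(), and fed to zlib; compressed output
 * is flushed in IOBUF_SIZE slices through png_write_image_data().
 */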
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
{
    PNGEncContext *s = avctx->priv_data;
    z_stream *const zstream = &s->zstream.zstream;
    const AVFrame *const p = pict;
    int y, len, ret;
    int row_size, pass_row_size;
    uint8_t *crow_buf, *crow;
    uint8_t *crow_base = NULL;
    uint8_t *progressive_buf = NULL;
    uint8_t *top_buf = NULL;

    row_size = (pict->width * s->bits_per_pixel + 7) >> 3;

    crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
    if (!crow_base) {
        ret = AVERROR(ENOMEM);
        goto the_end;
    }
    // pixel data should be aligned, but there's a control byte before it
    crow_buf = crow_base + 15;
    if (s->is_progressive) {
        progressive_buf = av_malloc(row_size + 1);
        top_buf = av_malloc(row_size + 1);
        if (!progressive_buf || !top_buf) {
            ret = AVERROR(ENOMEM);
            goto the_end;
        }
    }

    /* put each row */
    zstream->avail_out = IOBUF_SIZE;
    zstream->next_out  = s->buf;
    if (s->is_progressive) {
        int pass;

        for (pass = 0; pass < NB_PASSES; pass++) {
            /* NOTE: a pass is completely omitted if no pixels would be
             * output */
            pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
            if (pass_row_size > 0) {
                uint8_t *top = NULL;
                for (y = 0; y < pict->height; y++)
                    if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
                        const uint8_t *ptr = p->data[0] + y * p->linesize[0];
                        FFSWAP(uint8_t *, progressive_buf, top_buf);
                        png_get_interlaced_row(progressive_buf, pass_row_size,
                                               s->bits_per_pixel, pass,
                                               ptr, pict->width);
                        crow = png_choose_filter(s, crow_buf, progressive_buf,
                                                 top, pass_row_size, s->bits_per_pixel >> 3);
                        png_write_row(avctx, crow, pass_row_size + 1);
                        top = progressive_buf;
                    }
            }
        }
    } else {
        const uint8_t *top = NULL;
        for (y = 0; y < pict->height; y++) {
            const uint8_t *ptr = p->data[0] + y * p->linesize[0];
            crow = png_choose_filter(s, crow_buf, ptr, top,
                                     row_size, s->bits_per_pixel >> 3);
            png_write_row(avctx, crow, row_size + 1);
            top = ptr;
        }
    }
    /* compress last bytes */
    for (;;) {
        ret = deflate(zstream, Z_FINISH);
        if (ret == Z_OK || ret == Z_STREAM_END) {
            len = IOBUF_SIZE - zstream->avail_out;
            if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
                png_write_image_data(avctx, s->buf, len);
            }
            zstream->avail_out = IOBUF_SIZE;
            zstream->next_out  = s->buf;
            if (ret == Z_STREAM_END)
                break;
        } else {
            ret = -1;
            goto the_end;
        }
    }

    ret = 0;

the_end:
    av_freep(&crow_base);
    av_freep(&progressive_buf);
    av_freep(&top_buf);
    deflateReset(zstream);
    return ret;
}

static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict,
                                uint64_t *max_packet_size)
{
    PNGEncContext *s = avctx->priv_data;
    const AVFrameSideData *sd;
    const int hdr_size = 128;
    uint64_t new_pkt_size;
    uLong bound;

    if (!pict)
        return 0;
    sd = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
    if (!sd || !sd->size)
        return 0;
    if (sd->size != (uLong) sd->size)
        return AVERROR_INVALIDDATA;

    bound = deflateBound(&s->zstream.zstream, sd->size);
    if (bound > INT32_MAX - hdr_size)
        return AVERROR_INVALIDDATA;

    new_pkt_size = *max_packet_size + bound + hdr_size;
    if (new_pkt_size < *max_packet_size)
        return AVERROR_INVALIDDATA;
    *max_packet_size = new_pkt_size;
    return 0;
}

static int add_exif_profile_size(AVCodecContext *avctx, const AVFrame *pict,
                                 uint64_t *max_packet_size)
{
    const AVFrameSideData *sd;
    uint64_t new_pkt_size;
    /* includes orientation tag */
    const int base_exif_size = 92;
    uint64_t estimated_exif_size;

    sd = av_frame_get_side_data(pict, AV_FRAME_DATA_EXIF);
    estimated_exif_size = sd ? sd->size : 0;
    sd = av_frame_get_side_data(pict, AV_FRAME_DATA_DISPLAYMATRIX);
    if (sd)
        estimated_exif_size += base_exif_size;

    if (!estimated_exif_size)
        return 0;

    /* 12 is the png chunk header size */
    new_pkt_size = *max_packet_size + estimated_exif_size + 12;
    if (new_pkt_size < *max_packet_size)
        return AVERROR_INVALIDDATA;

    *max_packet_size = new_pkt_size;

    return 0;
}

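/*
 * The worst-case packet size below is the header budget plus, per row, the
 * deflate bound of an uncompressed row and 12 bytes of chunk overhead for
 * each IOBUF_SIZE slice the compressed stream may be split into.
 */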
static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
                      const AVFrame *pict, int *got_packet)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;
    int enc_row_size;
    uint64_t max_packet_size;

    enc_row_size = deflateBound(&s->zstream.zstream,
                                (avctx->width * s->bits_per_pixel + 7) >> 3);
    max_packet_size =
        FF_INPUT_BUFFER_MIN_SIZE + // headers
        avctx->height * (
            enc_row_size +
            12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
        );
    if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
        return ret;
    ret = add_exif_profile_size(avctx, pict, &max_packet_size);
    if (ret < 0)
        return ret;

    ret = ff_alloc_packet(avctx, pkt, max_packet_size);
    if (ret < 0)
        return ret;

    s->bytestream_start =
    s->bytestream       = pkt->data;
    s->bytestream_end   = pkt->data + pkt->size;

    AV_WB64(s->bytestream, PNGSIG);
    s->bytestream += 8;

    ret = encode_headers(avctx, pict);
    if (ret < 0)
        return ret;

    ret = encode_frame(avctx, pict);
    if (ret < 0)
        return ret;

    png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);

    pkt->size = s->bytestream - s->bytestream_start;
    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

    return 0;
}

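/*
 * Given the disposed previous canvas (output) and the new picture (input),
 * find the smallest rectangle that changed and build an image which, when
 * blended onto the canvas with the requested blend_op, reproduces the new
 * picture. Returns a negative value when no such image exists, e.g. OVER
 * blending without a usable fully transparent value.
 */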
static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input,
                                 APNGFctlChunk *fctl_chunk, uint8_t bpp)
{
    // output: background, input: foreground
    // output the image such that when blended with the background, will produce the foreground

    unsigned int x, y;
    unsigned int leftmost_x = input->width;
    unsigned int rightmost_x = 0;
    unsigned int topmost_y = input->height;
    unsigned int bottommost_y = 0;
    const uint8_t *input_data = input->data[0];
    uint8_t *output_data = output->data[0];
    ptrdiff_t input_linesize = input->linesize[0];
    ptrdiff_t output_linesize = output->linesize[0];

    // Find bounding box of changes
    for (y = 0; y < input->height; ++y) {
        for (x = 0; x < input->width; ++x) {
            if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
                continue;

            if (x < leftmost_x)
                leftmost_x = x;
            if (x >= rightmost_x)
                rightmost_x = x + 1;
            if (y < topmost_y)
                topmost_y = y;
            if (y >= bottommost_y)
                bottommost_y = y + 1;
        }

        input_data += input_linesize;
        output_data += output_linesize;
    }

    if (leftmost_x == input->width && rightmost_x == 0) {
        // Empty frame
        // APNG does not support empty frames, so we make it a 1x1 frame
        leftmost_x = topmost_y = 0;
        rightmost_x = bottommost_y = 1;
    }

    // Do actual inverse blending
    if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
        output_data = output->data[0];
        for (y = topmost_y; y < bottommost_y; ++y) {
            memcpy(output_data,
                   input->data[0] + input_linesize * y + bpp * leftmost_x,
                   bpp * (rightmost_x - leftmost_x));
            output_data += output_linesize;
        }
    } else { // APNG_BLEND_OP_OVER
        size_t transparent_palette_index;
        uint32_t *palette;

        switch (input->format) {
        case AV_PIX_FMT_RGBA64BE:
        case AV_PIX_FMT_YA16BE:
        case AV_PIX_FMT_RGBA:
        case AV_PIX_FMT_GRAY8A:
            break;

        case AV_PIX_FMT_PAL8:
            palette = (uint32_t*)input->data[1];
            for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
                if (palette[transparent_palette_index] >> 24 == 0)
                    break;
            break;

        default:
            // No alpha, so blending not possible
            return -1;
        }

        for (y = topmost_y; y < bottommost_y; ++y) {
            const uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
            uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
            output_data = output->data[0] + output_linesize * (y - topmost_y);
            for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
                if (!memcmp(foreground, background, bpp)) {
                    if (input->format == AV_PIX_FMT_PAL8) {
                        if (transparent_palette_index == 256) {
                            // Need fully transparent colour, but none exists
                            return -1;
                        }

                        *output_data = transparent_palette_index;
                    } else {
                        memset(output_data, 0, bpp);
                    }
                    continue;
                }

                // Check for special alpha values, since full inverse
                // alpha-on-alpha blending is rarely possible, and when
                // possible, doesn't compress much better than
                // APNG_BLEND_OP_SOURCE blending
                switch (input->format) {
                case AV_PIX_FMT_RGBA64BE:
                    if (((uint16_t*)foreground)[3] == 0xffff ||
                        ((uint16_t*)background)[3] == 0)
                        break;
                    return -1;

                case AV_PIX_FMT_YA16BE:
                    if (((uint16_t*)foreground)[1] == 0xffff ||
                        ((uint16_t*)background)[1] == 0)
                        break;
                    return -1;

                case AV_PIX_FMT_RGBA:
                    if (foreground[3] == 0xff || background[3] == 0)
                        break;
                    return -1;

                case AV_PIX_FMT_GRAY8A:
                    if (foreground[1] == 0xff || background[1] == 0)
                        break;
                    return -1;

                case AV_PIX_FMT_PAL8:
                    if (palette[*foreground] >> 24 == 0xff ||
                        palette[*background] >> 24 == 0)
                        break;
                    return -1;

                default:
                    av_unreachable("Pixfmt has been checked before");
                }

                memmove(output_data, foreground, bpp);
            }
        }
    }

    output->width = rightmost_x - leftmost_x;
    output->height = bottommost_y - topmost_y;
    fctl_chunk->width = output->width;
    fctl_chunk->height = output->height;
    fctl_chunk->x_offset = leftmost_x;
    fctl_chunk->y_offset = topmost_y;

    return 0;
}

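/*
 * Encode one APNG frame by trying every dispose_op for the previous frame
 * and every blend_op for the current one, and keeping whichever combination
 * yields the smallest compressed frame.
 */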
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
                             APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;
    unsigned int y;
    AVFrame* diffFrame;
    uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
    uint8_t *original_bytestream, *original_bytestream_end;
    uint8_t *temp_bytestream = 0, *temp_bytestream_end;
    uint32_t best_sequence_number;
    uint8_t *best_bytestream;
    size_t best_bytestream_size = SIZE_MAX;
    APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
    APNGFctlChunk fctl_chunk = *best_fctl_chunk;

    if (avctx->frame_num == 0) {
        best_fctl_chunk->width = pict->width;
        best_fctl_chunk->height = pict->height;
        best_fctl_chunk->x_offset = 0;
        best_fctl_chunk->y_offset = 0;
        best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
        return encode_frame(avctx, pict);
    }

    diffFrame = av_frame_alloc();
    if (!diffFrame)
        return AVERROR(ENOMEM);

    diffFrame->format = pict->format;
    diffFrame->width = pict->width;
    diffFrame->height = pict->height;
    if ((ret = av_frame_get_buffer(diffFrame, 0)) < 0)
        goto fail;

    original_bytestream = s->bytestream;
    original_bytestream_end = s->bytestream_end;

    temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
    if (!temp_bytestream) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);

    for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
        // 0: APNG_DISPOSE_OP_NONE
        // 1: APNG_DISPOSE_OP_BACKGROUND
        // 2: APNG_DISPOSE_OP_PREVIOUS

        for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
            // 0: APNG_BLEND_OP_SOURCE
            // 1: APNG_BLEND_OP_OVER

            uint32_t original_sequence_number = s->sequence_number, sequence_number;
            uint8_t *bytestream_start = s->bytestream;
            size_t bytestream_size;

            // Do disposal
            if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
                diffFrame->width = pict->width;
                diffFrame->height = pict->height;
                ret = av_frame_copy(diffFrame, s->last_frame);
                if (ret < 0)
                    goto fail;

                if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
                    for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
                        size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
                        memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
                    }
                }
            } else {
                if (!s->prev_frame)
                    continue;

                diffFrame->width = pict->width;
                diffFrame->height = pict->height;
                ret = av_frame_copy(diffFrame, s->prev_frame);
                if (ret < 0)
                    goto fail;
            }

            // Do inverse blending
            if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
                continue;

            // Do encoding
            ret = encode_frame(avctx, diffFrame);
            sequence_number = s->sequence_number;
            s->sequence_number = original_sequence_number;
            bytestream_size = s->bytestream - bytestream_start;
            s->bytestream = bytestream_start;
            if (ret < 0)
                goto fail;

            if (bytestream_size < best_bytestream_size) {
                *best_fctl_chunk = fctl_chunk;
                *best_last_fctl_chunk = last_fctl_chunk;

                best_sequence_number = sequence_number;
                best_bytestream = s->bytestream;
                best_bytestream_size = bytestream_size;

                if (best_bytestream == original_bytestream) {
                    s->bytestream = temp_bytestream;
                    s->bytestream_end = temp_bytestream_end;
                } else {
                    s->bytestream = original_bytestream;
                    s->bytestream_end = original_bytestream_end;
                }
            }
        }
    }

    s->sequence_number = best_sequence_number;
    s->bytestream = original_bytestream + best_bytestream_size;
    s->bytestream_end = original_bytestream_end;
    if (best_bytestream != original_bytestream)
        memcpy(original_bytestream, best_bytestream, best_bytestream_size);

    ret = 0;

fail:
    av_freep(&temp_bytestream);
    av_frame_free(&diffFrame);
    return ret;
}

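/*
 * APNG packets are produced with a one-frame delay: the incoming picture is
 * encoded into last_frame_packet, while the packet returned here belongs to
 * the previous frame, whose fcTL chunk (including the dispose_op chosen while
 * encoding the current frame) is only now complete.
 */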
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt,
                       const AVFrame *pict, int *got_packet)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;
    int enc_row_size;
    uint64_t max_packet_size;
    APNGFctlChunk fctl_chunk = {0};

    if (pict && s->color_type == PNG_COLOR_TYPE_PALETTE) {
        uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));

        if (avctx->frame_num == 0) {
            s->palette_checksum = checksum;
        } else if (checksum != s->palette_checksum) {
            av_log(avctx, AV_LOG_ERROR,
                   "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
            return -1;
        }
    }

    enc_row_size = deflateBound(&s->zstream.zstream,
                                (avctx->width * s->bits_per_pixel + 7) >> 3);
    max_packet_size =
        FF_INPUT_BUFFER_MIN_SIZE + // headers
        avctx->height * (
            enc_row_size +
            (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
        );
    if (max_packet_size > INT_MAX)
        return AVERROR(ENOMEM);

    if (avctx->frame_num == 0) {
        if (!pict)
            return AVERROR(EINVAL);
        uint64_t extradata_size = FF_INPUT_BUFFER_MIN_SIZE;
        ret = add_icc_profile_size(avctx, pict, &extradata_size);
        if (ret < 0)
            return ret;
        ret = add_exif_profile_size(avctx, pict, &extradata_size);
        if (ret < 0)
            return ret;
        /* the compiler will optimize this out if UINT64_MAX == SIZE_MAX */
        if (extradata_size > SIZE_MAX)
            return AVERROR(ENOMEM);
        s->bytestream = s->extra_data = av_malloc(extradata_size);
        if (!s->extra_data)
            return AVERROR(ENOMEM);

        ret = encode_headers(avctx, pict);
        if (ret < 0)
            return ret;

        s->extra_data_size = s->bytestream - s->extra_data;

        s->last_frame_packet = av_malloc(max_packet_size);
        if (!s->last_frame_packet)
            return AVERROR(ENOMEM);
    } else if (s->last_frame) {
        ret = ff_get_encode_buffer(avctx, pkt, s->last_frame_packet_size, 0);
        if (ret < 0)
            return ret;

        memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
        pkt->pts = s->last_frame->pts;
        pkt->duration = s->last_frame->duration;

        ret = ff_encode_reordered_opaque(avctx, pkt, s->last_frame);
        if (ret < 0)
            return ret;
    }

    if (pict) {
        s->bytestream_start =
        s->bytestream       = s->last_frame_packet;
        s->bytestream_end   = s->bytestream + max_packet_size;

        // We're encoding the frame first, so we have to do a bit of shuffling around
        // to have the image data write to the correct place in the buffer
        fctl_chunk.sequence_number = s->sequence_number;
        ++s->sequence_number;
        s->bytestream += APNG_FCTL_CHUNK_SIZE + 12;

        ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
        if (ret < 0)
            return ret;

        fctl_chunk.delay_num = 0; // delay filled in during muxing
        fctl_chunk.delay_den = 0;
    } else {
        s->last_frame_fctl.dispose_op = APNG_DISPOSE_OP_NONE;
    }

    if (s->last_frame) {
        uint8_t* last_fctl_chunk_start = pkt->data;
        uint8_t buf[APNG_FCTL_CHUNK_SIZE];
        if (!s->extra_data_updated) {
            uint8_t *side_data = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, s->extra_data_size);
            if (!side_data)
                return AVERROR(ENOMEM);
            memcpy(side_data, s->extra_data, s->extra_data_size);
            s->extra_data_updated = 1;
        }

        AV_WB32(buf + 0, s->last_frame_fctl.sequence_number);
        AV_WB32(buf + 4, s->last_frame_fctl.width);
        AV_WB32(buf + 8, s->last_frame_fctl.height);
        AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
        AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
        AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
        AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
        buf[24] = s->last_frame_fctl.dispose_op;
        buf[25] = s->last_frame_fctl.blend_op;
        png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, sizeof(buf));

        *got_packet = 1;
    }

    if (pict) {
        if (!s->last_frame) {
            s->last_frame = av_frame_alloc();
            if (!s->last_frame)
                return AVERROR(ENOMEM);
        } else if (s->last_frame_fctl.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
            if (!s->prev_frame) {
                s->prev_frame = av_frame_alloc();
                if (!s->prev_frame)
                    return AVERROR(ENOMEM);

                s->prev_frame->format = pict->format;
                s->prev_frame->width = pict->width;
                s->prev_frame->height = pict->height;
                if ((ret = av_frame_get_buffer(s->prev_frame, 0)) < 0)
                    return ret;
            }

            // Do disposal, but not blending
            av_frame_copy(s->prev_frame, s->last_frame);
            if (s->last_frame_fctl.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
                uint32_t y;
                uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
                for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
                    size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
                    memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
                }
            }
        }

        ret = av_frame_replace(s->last_frame, pict);
        if (ret < 0)
            return ret;

        s->last_frame_fctl = fctl_chunk;
        s->last_frame_packet_size = s->bytestream - s->bytestream_start;
    } else {
        av_frame_free(&s->last_frame);
    }

    return 0;
}

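/*
 * Map the input pixel format to a PNG bit depth and colour type, pick the
 * row filter strategy, and set up the shared zlib deflate context.
 */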
static av_cold int png_enc_init(AVCodecContext *avctx)
{
    PNGEncContext *s = avctx->priv_data;
    int compression_level;

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGBA:
        avctx->bits_per_coded_sample = 32;
        break;
    case AV_PIX_FMT_RGB24:
        avctx->bits_per_coded_sample = 24;
        break;
    case AV_PIX_FMT_GRAY8:
        avctx->bits_per_coded_sample = 0x28;
        break;
    case AV_PIX_FMT_MONOBLACK:
        avctx->bits_per_coded_sample = 1;
        break;
    case AV_PIX_FMT_PAL8:
        avctx->bits_per_coded_sample = 8;
    }

    ff_llvidencdsp_init(&s->llvidencdsp);

    if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
        s->filter_type = PNG_FILTER_VALUE_NONE;

    if (s->dpi && s->dpm) {
        av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
        return AVERROR(EINVAL);
    } else if (s->dpi) {
        s->dpm = s->dpi * 10000 / 254;
    }

    s->is_progressive = !!(avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT);
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGBA64BE:
        s->bit_depth = 16;
        s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
        break;
    case AV_PIX_FMT_RGB48BE:
        s->bit_depth = 16;
        s->color_type = PNG_COLOR_TYPE_RGB;
        break;
    case AV_PIX_FMT_RGBA:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
        break;
    case AV_PIX_FMT_RGB24:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_RGB;
        break;
    case AV_PIX_FMT_GRAY16BE:
        s->bit_depth = 16;
        s->color_type = PNG_COLOR_TYPE_GRAY;
        break;
    case AV_PIX_FMT_GRAY8:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_GRAY;
        break;
    case AV_PIX_FMT_GRAY8A:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
        break;
    case AV_PIX_FMT_YA16BE:
        s->bit_depth = 16;
        s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
        break;
    case AV_PIX_FMT_MONOBLACK:
        s->bit_depth = 1;
        s->color_type = PNG_COLOR_TYPE_GRAY;
        break;
    case AV_PIX_FMT_PAL8:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_PALETTE;
        break;
    default:
        av_unreachable("Already checked via CODEC_PIXFMTS");
    }
    s->bits_per_pixel = ff_png_get_nb_channels(s->color_type) * s->bit_depth;

    compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
                      ? Z_DEFAULT_COMPRESSION
                      : av_clip(avctx->compression_level, 0, 9);
    return ff_deflate_init(&s->zstream, compression_level, avctx);
}

static av_cold int png_enc_close(AVCodecContext *avctx)
{
    PNGEncContext *s = avctx->priv_data;

    ff_deflate_end(&s->zstream);
    av_frame_free(&s->last_frame);
    av_frame_free(&s->prev_frame);
    av_freep(&s->last_frame_packet);
    av_freep(&s->extra_data);
    s->extra_data_size = 0;
    return 0;
}

#define OFFSET(x) offsetof(PNGEncContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
    {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
    {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
    { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_PAETH }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, .unit = "pred" },
    { "none",  NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE },  INT_MIN, INT_MAX, VE, .unit = "pred" },
    { "sub",   NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB },   INT_MIN, INT_MAX, VE, .unit = "pred" },
    { "up",    NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP },    INT_MIN, INT_MAX, VE, .unit = "pred" },
    { "avg",   NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG },   INT_MIN, INT_MAX, VE, .unit = "pred" },
    { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, .unit = "pred" },
    { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, .unit = "pred" },
    { NULL },
};

static const AVClass pngenc_class = {
    .class_name = "(A)PNG encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

const FFCodec ff_png_encoder = {
    .p.name         = "png",
    CODEC_LONG_NAME("PNG (Portable Network Graphics) image"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_PNG,
    .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS |
                      AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
    .priv_data_size = sizeof(PNGEncContext),
    .init           = png_enc_init,
    .close          = png_enc_close,
    FF_CODEC_ENCODE_CB(encode_png),
    CODEC_PIXFMTS(AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
                  AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
                  AV_PIX_FMT_PAL8,
                  AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
                  AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
                  AV_PIX_FMT_MONOBLACK),
    .alpha_modes    = (const enum AVAlphaMode[]) {
        AVALPHA_MODE_STRAIGHT,
    },
    .p.priv_class   = &pngenc_class,
    .caps_internal  = FF_CODEC_CAP_ICC_PROFILES,
};

const FFCodec ff_apng_encoder = {
    .p.name         = "apng",
    CODEC_LONG_NAME("APNG (Animated Portable Network Graphics) image"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_APNG,
    .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY |
                      AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
    .priv_data_size = sizeof(PNGEncContext),
    .init           = png_enc_init,
    .close          = png_enc_close,
    FF_CODEC_ENCODE_CB(encode_apng),
    CODEC_PIXFMTS(AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
                  AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
                  AV_PIX_FMT_PAL8,
                  AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
                  AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE),
    .alpha_modes    = (const enum AVAlphaMode[]) {
        AVALPHA_MODE_STRAIGHT,
    },
    .p.priv_class   = &pngenc_class,
    .caps_internal  = FF_CODEC_CAP_ICC_PROFILES,
};
AVFrame::color_trc
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:682
ff_encode_reordered_opaque
int ff_encode_reordered_opaque(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *frame)
Propagate user opaque values from the frame to avctx/pkt as needed.
Definition: encode.c:218
AVMasteringDisplayMetadata::has_primaries
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
Definition: mastering_display_metadata.h:62
CODEC_PIXFMTS
#define CODEC_PIXFMTS(...)
Definition: codec_internal.h:391
encode_frame
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:527
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AVFrame::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:678
AVMasteringDisplayMetadata::max_luminance
AVRational max_luminance
Max luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:57
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
entry
#define entry
Definition: aom_film_grain_template.c:66
av_clip
#define av_clip
Definition: common.h:100
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
AVALPHA_MODE_STRAIGHT
@ AVALPHA_MODE_STRAIGHT
Alpha channel is independent of color values.
Definition: pixfmt.h:803
PNGEncContext::buf
uint8_t buf[IOBUF_SIZE]
Definition: pngenc.c:66
AV_WL32
#define AV_WL32(p, v)
Definition: intreadwrite.h:422
AVColorTransferCharacteristic
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:661
libm.h
ff_png_encoder
const FFCodec ff_png_encoder
Definition: pngenc.c:1285
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:206
av_frame_get_side_data
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:659
AVColorPrimariesDesc
Struct that contains both white point location and primaries location, providing the complete descrip...
Definition: csp.h:78
AVCRC
uint32_t AVCRC
Definition: crc.h:46
png_get_chrm
static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
Definition: pngenc.c:306
AV_PKT_DATA_NEW_EXTRADATA
@ AV_PKT_DATA_NEW_EXTRADATA
The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was...
Definition: packet.h:56
APNG_FCTL_CHUNK_SIZE
#define APNG_FCTL_CHUNK_SIZE
Definition: apng.h:42
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
ff_png_get_nb_channels
int ff_png_get_nb_channels(int color_type)
Definition: png.c:41
PNGEncContext::bits_per_pixel
int bits_per_pixel
Definition: pngenc.c:73
AVMasteringDisplayMetadata::display_primaries
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
Definition: mastering_display_metadata.h:42
src1
const pixel * src1
Definition: h264pred_template.c:420
AVMasteringDisplayMetadata::has_luminance
int has_luminance
Flag indicating whether the luminance (min_ and max_) have been set.
Definition: mastering_display_metadata.h:67
rational.h
PNGEncContext::last_frame
AVFrame * last_frame
Definition: pngenc.c:83
int64_t
long long int64_t
Definition: coverity.c:34
output
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
Definition: filter_design.txt:226
AVFrame::color_primaries
enum AVColorPrimaries color_primaries
Definition: frame.h:680
mask
int mask
Definition: mediacodecdec_common.c:154
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
apng_encode_frame
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict, APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
Definition: pngenc.c:873
AVContentLightMetadata::MaxCLL
unsigned MaxCLL
Max content light level (cd/m^2).
Definition: mastering_display_metadata.h:111
APNGFctlChunk::delay_num
uint16_t delay_num
Definition: pngenc.c:51
test::height
int height
Definition: vc1dsp.c:40
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
AV_PIX_FMT_RGBA64BE
@ AV_PIX_FMT_RGBA64BE
packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is st...
Definition: pixfmt.h:202
AVFrame::width
int width
Definition: frame.h:499
PNG_FILTER_VALUE_MIXED
#define PNG_FILTER_VALUE_MIXED
Definition: png.h:45
w
uint8_t w
Definition: llviddspenc.c:38
AVPacket::data
uint8_t * data
Definition: packet.h:558
AVOption
AVOption.
Definition: opt.h:429
encode.h
b
#define b
Definition: input.c:42
AVCOL_TRC_UNSPECIFIED
@ AVCOL_TRC_UNSPECIFIED
Definition: pixfmt.h:664
data
const char data[16]
Definition: mxf.c:149
png_write_row
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
Definition: pngenc.c:281
FFCodec
Definition: codec_internal.h:127
output_data
static int output_data(MLPDecodeContext *m, unsigned int substr, AVFrame *frame, int *got_frame_ptr)
Write the audio data into the output buffer.
Definition: mlpdec.c:1109
PNGEncContext::dpm
int dpm
Physical pixel density, in dots per meter, if set.
Definition: pngenc.c:68
AV_FRAME_DATA_DISPLAYMATRIX
@ AV_FRAME_DATA_DISPLAYMATRIX
This side data contains a 3x3 transformation matrix describing an affine transformation that needs to...
Definition: frame.h:85
AVPacket::duration
int64_t duration
Duration of this packet in AVStream->time_base units, 0 if unknown.
Definition: packet.h:576
png_get_gama
static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
Definition: pngenc.c:324
PNGEncContext::last_frame_packet
uint8_t * last_frame_packet
Definition: pngenc.c:85
AVColorPrimaries
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:636
ff_deflate_end
void ff_deflate_end(FFZStream *zstream)
Wrapper around deflateEnd().
AV_CODEC_ID_APNG
@ AV_CODEC_ID_APNG
Definition: codec_id.h:269
FF_COMPRESSION_DEFAULT
#define FF_COMPRESSION_DEFAULT
Definition: avcodec.h:1224
APNG_DISPOSE_OP_BACKGROUND
@ APNG_DISPOSE_OP_BACKGROUND
Definition: apng.h:32
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:613
FF_INPUT_BUFFER_MIN_SIZE
#define FF_INPUT_BUFFER_MIN_SIZE
Used by some encoders as upper bound for the length of headers.
Definition: encode.h:33
AV_WB64
#define AV_WB64(p, v)
Definition: intreadwrite.h:429
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:448
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:31
NB_PASSES
#define NB_PASSES
Definition: png.h:47
AVContentLightMetadata
Content light level needed by to transmit HDR over HDMI (CTA-861.3).
Definition: mastering_display_metadata.h:107
crc.h
ff_apng_encoder
const FFCodec ff_apng_encoder
Definition: pngenc.c:1309
sub_png_paeth_prediction
static void sub_png_paeth_prediction(uint8_t *dst, const uint8_t *src, const uint8_t *top, int w, int bpp)
Definition: pngenc.c:128
AV_PIX_FMT_GRAY16BE
@ AV_PIX_FMT_GRAY16BE
Y , 16bpp, big-endian.
Definition: pixfmt.h:104
close
static av_cold void close(AVCodecParserContext *s)
Definition: apv_parser.c:135
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:64
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:131
PNGEncContext::prev_frame
AVFrame * prev_frame
Definition: pngenc.c:82
AVCOL_TRC_IEC61966_2_1
@ AVCOL_TRC_IEC61966_2_1
IEC 61966-2-1 (sRGB or sYCC)
Definition: pixfmt.h:675
ff_png_pass_row_size
int ff_png_pass_row_size(int pass, int bits_per_pixel, int width)
Definition: png.c:54
fail
#define fail()
Definition: checkasm.h:206
AV_STEREO3D_2D
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:52
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:488
APNGFctlChunk::blend_op
uint8_t blend_op
Definition: pngenc.c:52
FF_CODEC_ENCODE_CB
#define FF_CODEC_ENCODE_CB(func)
Definition: codec_internal.h:358
AVRational::num
int num
Numerator.
Definition: rational.h:59
encode_png
static int encode_png(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:677
PNG_COLOR_TYPE_RGB_ALPHA
#define PNG_COLOR_TYPE_RGB_ALPHA
Definition: png.h:36
AV_CODEC_FLAG_INTERLACED_DCT
#define AV_CODEC_FLAG_INTERLACED_DCT
Use interlaced DCT.
Definition: avcodec.h:310
png_filter_row
static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type, const uint8_t *src, const uint8_t *top, int size, int bpp)
Definition: pngenc.c:172
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
avassert.h
pkt
AVPacket * pkt
Definition: movenc.c:60
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
zlib_wrapper.h
AVFrameSideData::size
size_t size
Definition: frame.h:285
av_cold
#define av_cold
Definition: attributes.h:106
encode_apng
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:1002
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:60
PNGEncContext::bytestream_end
uint8_t * bytestream_end
Definition: pngenc.c:61
stereo3d.h
AVMasteringDisplayMetadata::white_point
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
Definition: mastering_display_metadata.h:47
s
#define s(width, name)
Definition: cbs_vp9.c:198
av_csp_primaries_desc_from_id
const AVColorPrimariesDesc * av_csp_primaries_desc_from_id(enum AVColorPrimaries prm)
Retrieves a complete gamut description from an enum constant describing the color primaries.
Definition: csp.c:90
png_write_chunk
static void png_write_chunk(uint8_t **f, uint32_t tag, const uint8_t *buf, int length)
Definition: pngenc.c:233
APNG_BLEND_OP_SOURCE
@ APNG_BLEND_OP_SOURCE
Definition: apng.h:37
PNG_COLOR_TYPE_RGB
#define PNG_COLOR_TYPE_RGB
Definition: png.h:35
AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE
#define AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE
This encoder can reorder user opaque values from input AVFrames and return them with corresponding ou...
Definition: codec.h:144
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:41
AVCodecContext::bits_per_raw_sample
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
Definition: avcodec.h:1553
PNG_Q2D
#define PNG_Q2D(q, divisor)
Definition: pngenc.c:304
png_write_image_data
static void png_write_image_data(AVCodecContext *avctx, const uint8_t *buf, int length)
Definition: pngenc.c:253
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:331
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:100
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:441
AVStereo3D::flags
int flags
Additional information about the frame packing.
Definition: stereo3d.h:212
AV_CODEC_ID_PNG
@ AV_CODEC_ID_PNG
Definition: codec_id.h:113
if
if(ret)
Definition: filter_design.txt:179
PNGEncContext
Definition: pngenc.c:55
APNGFctlChunk::y_offset
uint32_t y_offset
Definition: pngenc.c:50
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:95
AV_PIX_FMT_GRAY8A
@ AV_PIX_FMT_GRAY8A
alias for AV_PIX_FMT_YA8
Definition: pixfmt.h:143
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
APNGFctlChunk::delay_den
uint16_t delay_den
Definition: pngenc.c:51
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
NULL
#define NULL
Definition: coverity.c:32
exif_internal.h
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_EXIF_TIFF_HEADER
@ AV_EXIF_TIFF_HEADER
The TIFF header starts with 0x49492a00, or 0x4d4d002a.
Definition: exif.h:63
apng.h
AV_WB16
#define AV_WB16(p, v)
Definition: intreadwrite.h:401
av_unreachable
#define av_unreachable(msg)
Asserts that are used as compiler optimization hints depending upon ASSERT_LEVEL and NBDEBUG.
Definition: avassert.h:108
IOBUF_SIZE
#define IOBUF_SIZE
Definition: pngenc.c:45
AV_PIX_FMT_MONOBLACK
@ AV_PIX_FMT_MONOBLACK
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb.
Definition: pixfmt.h:83
AVCOL_PRI_BT709
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
Definition: pixfmt.h:638
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:241
apng_do_inverse_blend
static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input, APNGFctlChunk *fctl_chunk, uint8_t bpp)
Definition: pngenc.c:727
APNGFctlChunk::width
uint32_t width
Definition: pngenc.c:49
png_enc_close
static av_cold int png_enc_close(AVCodecContext *avctx)
Definition: pngenc.c:1250
AV_FRAME_DATA_ICC_PROFILE
@ AV_FRAME_DATA_ICC_PROFILE
The data contains an ICC profile as an opaque octet buffer following the format described by ISO 1507...
Definition: frame.h:144
APNG_DISPOSE_OP_PREVIOUS
@ APNG_DISPOSE_OP_PREVIOUS
Definition: apng.h:33
PNG_COLOR_TYPE_GRAY
#define PNG_COLOR_TYPE_GRAY
Definition: png.h:33
options
Definition: swscale.c:43
deflate
static void deflate(uint8_t *dst, const uint8_t *p1, int width, int threshold, const uint8_t *coordinates[], int coord, int maxc)
Definition: vf_neighbor.c:161
PNGEncContext::filter_type
int filter_type
Definition: pngenc.c:63
AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
@ AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
Mastering display metadata associated with a video frame.
Definition: frame.h:120
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
PNGEncContext::extra_data_updated
int extra_data_updated
Definition: pngenc.c:78
APNGFctlChunk
Definition: pngenc.c:47
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
ff_png_pass_ymask
const uint8_t ff_png_pass_ymask[NB_PASSES]
Definition: png.c:27
add_exif_profile_size
static int add_exif_profile_size(AVCodecContext *avctx, const AVFrame *pict, uint64_t *max_packet_size)
Definition: pngenc.c:649
ff_llvidencdsp_init
av_cold void ff_llvidencdsp_init(LLVidEncDSPContext *c)
Definition: lossless_videoencdsp.c:100
add_icc_profile_size
static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict, uint64_t *max_packet_size)
Definition: pngenc.c:621
APNGFctlChunk::sequence_number
uint32_t sequence_number
Definition: pngenc.c:48
AV_WB32
#define AV_WB32(p, v)
Definition: intreadwrite.h:415
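For illustration only (put_chunk_header() is not a helper from this file): AV_WB16/AV_WB32 store values most-significant byte first, which is the byte order PNG uses for chunk lengths, types and CRCs.
#include <stdint.h>
#include "libavutil/intreadwrite.h"

static void put_chunk_header(uint8_t *p, uint32_t length, uint32_t be_tag)
{
    AV_WB32(p,     length); /* 4-byte big-endian payload length */
    AV_WB32(p + 4, be_tag); /* 4-byte chunk type, e.g. MKBETAG('I','H','D','R') */
}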
PNGEncContext::zstream
FFZStream zstream
Definition: pngenc.c:65
AVAlphaMode
AVAlphaMode
Correlation between the alpha channel and color values.
Definition: pixfmt.h:800
test::width
int width
Definition: vc1dsp.c:39
PNG_FILTER_VALUE_NONE
#define PNG_FILTER_VALUE_NONE
Definition: png.h:40
f
f
Definition: af_crystalizer.c:122
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:368
AV_PIX_FMT_RGB24
@ AV_PIX_FMT_RGB24
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:75
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AVPacket::size
int size
Definition: packet.h:559
codec_internal.h
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:87
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:711
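A hedged sketch of copying pixel data into a newly allocated frame; av_frame_copy() requires that both frames already have matching format and dimensions and that the destination has allocated buffers.
#include "libavutil/frame.h"

static AVFrame *clone_pixels(const AVFrame *src)
{
    AVFrame *dst = av_frame_alloc();
    if (!dst)
        return NULL;
    dst->format = src->format; /* must match before av_frame_copy() */
    dst->width  = src->width;
    dst->height = src->height;
    if (av_frame_get_buffer(dst, 0) < 0 || av_frame_copy(dst, src) < 0)
        av_frame_free(&dst);   /* frees and resets dst to NULL on failure */
    return dst;
}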
av_bswap32
#define av_bswap32
Definition: bswap.h:47
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:424
AV_PIX_FMT_YA16BE
@ AV_PIX_FMT_YA16BE
16 bits gray, 16 bits alpha (big-endian)
Definition: pixfmt.h:209
PNGEncContext::last_frame_packet_size
size_t last_frame_packet_size
Definition: pngenc.c:86
PNG_FILTER_VALUE_AVG
#define PNG_FILTER_VALUE_AVG
Definition: png.h:43
size
int size
Definition: twinvq_data.h:10344
av_csp_approximate_trc_gamma
double av_csp_approximate_trc_gamma(enum AVColorTransferCharacteristic trc)
Determine a suitable 'gamma' value to match the supplied AVColorTransferCharacteristic.
Definition: csp.c:149
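A sketch under the assumption that a PNG gAMA-style field stores the reciprocal of the display gamma scaled by 100000; gamma_to_gama() is a hypothetical helper, not part of this file.
#include <math.h>
#include <stdint.h>
#include "libavutil/csp.h"

static uint32_t gamma_to_gama(enum AVColorTransferCharacteristic trc)
{
    double gamma = av_csp_approximate_trc_gamma(trc);
    if (gamma <= 1e-6)
        return 0;                             /* no usable approximation */
    return (uint32_t)lrint(100000.0 / gamma); /* e.g. ~45455 for a 2.2 display gamma */
}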
MKBETAG
#define MKBETAG(a, b, c, d)
Definition: macros.h:56
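For reference, MKBETAG packs its first argument into the most significant byte, so the result equals the four characters read as a big-endian 32-bit value; a tiny compile-time check illustrating this:
#include "libavutil/macros.h"

/* 'I'=0x49 'H'=0x48 'D'=0x44 'R'=0x52 -> big-endian tag 0x49484452 */
_Static_assert(MKBETAG('I', 'H', 'D', 'R') == 0x49484452, "MKBETAG is big-endian");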
PNGEncContext::llvidencdsp
LLVidEncDSPContext llvidencdsp
Definition: pngenc.c:57
APNG_DISPOSE_OP_NONE
@ APNG_DISPOSE_OP_NONE
Definition: apng.h:31
AVFrameSideData::data
uint8_t * data
Definition: frame.h:284
PNG_FILTER_VALUE_PAETH
#define PNG_FILTER_VALUE_PAETH
Definition: png.h:44
AVFrame::format
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:514
PNGEncContext::extra_data
uint8_t * extra_data
Definition: pngenc.c:79
png_choose_filter
static uint8_t * png_choose_filter(PNGEncContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *top, int size, int bpp)
Definition: pngenc.c:203
buffer.h
PNG_FILTER_VALUE_UP
#define PNG_FILTER_VALUE_UP
Definition: png.h:42
a
The reader does not expect b to be semantically signed here, and if the code is changed, by maybe adding a division or other operation, the signedness will almost certainly be mistaken. To avoid this confusion a new type was introduced: SUINT is the C unsigned type, but it holds a signed int; to use the same example, SUINT a ...
Definition: undefined.txt:41
csp.h
av_crc_get_table
const AVCRC * av_crc_get_table(AVCRCId crc_id)
Get an initialized standard CRC table.
Definition: crc.c:374
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
OFFSET
#define OFFSET(x)
Definition: pngenc.c:1263
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:564
AV_STEREO3D_FLAG_INVERT
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:194
input
Test the status of the outputs and forward it to the corresponding inputs; return FFERROR_NOT_READY if the filter stores internally one or a few frames for some input ...
Definition: filter_design.txt:172
PNGSIG
#define PNGSIG
Definition: png.h:49
AVBufferRef::size
size_t size
Size of data in bytes.
Definition: buffer.h:94
lossless_videoencdsp.h
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1546
PNG_FILTER_VALUE_SUB
#define PNG_FILTER_VALUE_SUB
Definition: png.h:41
AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
@ AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
Content light level (based on CTA-861.3).
Definition: frame.h:137
AV_PIX_FMT_RGB48BE
@ AV_PIX_FMT_RGB48BE
packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:109
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:551
options
static const AVOption options[]
Definition: pngenc.c:1265
src2
const pixel * src2
Definition: h264pred_template.c:421
AV_FRAME_DATA_STEREO3D
@ AV_FRAME_DATA_STEREO3D
Stereoscopic 3d metadata.
Definition: frame.h:64
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:179
AVMasteringDisplayMetadata
Mastering display metadata capable of representing the color volume of the display used to master the...
Definition: mastering_display_metadata.h:38
len
int len
Definition: vorbis_enc_data.h:426
AVCodecContext::height
int height
Definition: avcodec.h:592
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:631
LLVidEncDSPContext
Definition: lossless_videoencdsp.h:25
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:750
FF_CODEC_CAP_ICC_PROFILES
#define FF_CODEC_CAP_ICC_PROFILES
Codec supports embedded ICC profiles (AV_FRAME_DATA_ICC_PROFILE).
Definition: codec_internal.h:81
sub_left_prediction
static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
Definition: pngenc.c:156
PNGEncContext::color_type
int color_type
Definition: pngenc.c:72
avcodec.h
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
AVCodecContext::frame_num
int64_t frame_num
Frame counter, set by libavcodec.
Definition: avcodec.h:1878
bound
static double bound(const double threshold, const double val)
Definition: af_dynaudnorm.c:413
tag
uint32_t tag
Definition: movenc.c:1957
ret
ret
Definition: filter_design.txt:187
pred
static const float pred[4]
Definition: siprdata.h:259
PNGEncContext::extra_data_size
int extra_data_size
Definition: pngenc.c:80
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVALPHA_MODE_UNSPECIFIED
@ AVALPHA_MODE_UNSPECIFIED
Unknown alpha handling, or no alpha channel.
Definition: pixfmt.h:801
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:81
AVStereo3D::type
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:207
PNGEncContext::bit_depth
int bit_depth
Definition: pngenc.c:71
PNG_LRINT
#define PNG_LRINT(d, divisor)
Definition: pngenc.c:303
PNGEncContext::bytestream_start
uint8_t * bytestream_start
Definition: pngenc.c:60
AV_INPUT_BUFFER_PADDING_SIZE
#define AV_INPUT_BUFFER_PADDING_SIZE
Definition: defs.h:40
U
#define U(x)
Definition: vpx_arith.h:37
av_frame_replace
int av_frame_replace(AVFrame *dst, const AVFrame *src)
Ensure the destination frame refers to the same data described by the source frame,...
Definition: frame.c:376
AVCodecContext
main external API structure.
Definition: avcodec.h:431
AVFrame::height
int height
Definition: frame.h:499
av_packet_new_side_data
uint8_t * av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type, size_t size)
Allocate new side data for a packet.
Definition: packet.c:232
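A hedged sketch of attaching side data to an output packet; attach_new_extradata() and its arguments are placeholders for illustration.
#include <string.h>
#include "avcodec.h"

static int attach_new_extradata(AVPacket *pkt, const uint8_t *data, size_t size)
{
    uint8_t *sd = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, size);
    if (!sd)
        return AVERROR(ENOMEM);
    memcpy(sd, data, size); /* the buffer is owned by and freed with the packet */
    return 0;
}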
ff_get_encode_buffer
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
Definition: encode.c:104
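Inside an encoder callback the output buffer is typically requested in one call once the worst-case size is known; a minimal sketch (max_size is a placeholder computed elsewhere):
#include "avcodec.h"
#include "encode.h"

static int request_output(AVCodecContext *avctx, AVPacket *pkt, int64_t max_size)
{
    int ret = ff_get_encode_buffer(avctx, pkt, max_size, 0);
    if (ret < 0)
        return ret;   /* on success pkt->data and pkt->size are valid */
    return 0;
}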
av_crc
uint32_t av_crc(const AVCRC *ctx, uint32_t crc, const uint8_t *buffer, size_t length)
Calculate the CRC of a block.
Definition: crc.c:392
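The zlib/PNG-style CRC-32 can be obtained by seeding av_crc() with all ones and inverting the result; png_style_crc32() below is an illustrative helper, not the encoder's own chunk writer.
#include <stddef.h>
#include <stdint.h>
#include "libavutil/crc.h"

static uint32_t png_style_crc32(const uint8_t *buf, size_t len)
{
    const AVCRC *table = av_crc_get_table(AV_CRC_32_IEEE_LE);
    return ~av_crc(table, ~0U, buf, len); /* init 0xffffffff, final xor 0xffffffff */
}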
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
png_get_interlaced_row
static void png_get_interlaced_row(uint8_t *dst, int row_size, int bits_per_pixel, int pass, const uint8_t *src, int width)
Definition: pngenc.c:89
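The column selection per Adam7 pass is driven by the static masks[] table in this file; a standalone sketch reproducing the same (mask << (x & 7)) & 0x80 test:
#include <stdio.h>

static const int masks[] = { 0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff };

int main(void)
{
    for (int pass = 0; pass < 7; pass++) {
        printf("pass %d columns (x %% 8):", pass + 1);
        for (int x = 0; x < 8; x++)
            if ((masks[pass] << (x & 7)) & 0x80)
                printf(" %d", x);
        printf("\n");
    }
    return 0;
}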
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:76
PNG_COLOR_MASK_PALETTE
#define PNG_COLOR_MASK_PALETTE
Definition: png.h:29
Windows::Graphics::DirectX::Direct3D11::p
IDirect3DDxgiInterfaceAccess _COM_Outptr_ void ** p
Definition: vsrc_gfxcapture_winrt.hpp:53
AVMasteringDisplayMetadata::min_luminance
AVRational min_luminance
Min luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:52
AV_WB32_PNG_D
#define AV_WB32_PNG_D(buf, q)
Definition: pngenc.c:305
AV_CRC_32_IEEE_LE
@ AV_CRC_32_IEEE_LE
Definition: crc.h:53
PNGEncContext::last_frame_fctl
APNGFctlChunk last_frame_fctl
Definition: pngenc.c:84
desc
const char * desc
Definition: libsvtav1.c:79
PNGEncContext::dpi
int dpi
Physical pixel density, in dots per inch, if set.
Definition: pngenc.c:67
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
FFZStream
Definition: zlib_wrapper.h:27
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
mastering_display_metadata.h
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:282
png_enc_init
static av_cold int png_enc_init(AVCodecContext *avctx)
Definition: pngenc.c:1163
AVDictionaryEntry
Definition: dict.h:90
png_write_iccp
static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
Definition: pngenc.c:334
alpha
static const int16_t alpha[]
Definition: ilbcdata.h:55
AVPacket
This structure stores compressed data.
Definition: packet.h:535
AVContentLightMetadata::MaxFALL
unsigned MaxFALL
Max average light level per frame (cd/m^2).
Definition: mastering_display_metadata.h:116
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:458
png.h
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
ff_exif_get_buffer
int ff_exif_get_buffer(void *logctx, const AVFrame *frame, AVBufferRef **buffer_ptr, enum AVExifHeaderMode header_mode)
Gets all relevant side data, collects it into an IFD, and writes it into the corresponding buffer poi...
Definition: exif.c:1360
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:592
AV_FRAME_DATA_EXIF
@ AV_FRAME_DATA_EXIF
Exchangeable image file format (EXIF) metadata.
Definition: frame.h:262
bytestream.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:472
PNG_COLOR_TYPE_GRAY_ALPHA
#define PNG_COLOR_TYPE_GRAY_ALPHA
Definition: png.h:37
AVFrameSideData::metadata
AVDictionary * metadata
Definition: frame.h:286
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
APNGFctlChunk::height
uint32_t height
Definition: pngenc.c:49
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
MKTAG
#define MKTAG(a, b, c, d)
Definition: macros.h:55
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:203
width
#define width
Definition: dsp.h:89
input_data
static void input_data(MLPEncodeContext *ctx, MLPSubstream *s, uint8_t **const samples, int nb_samples)
Wrapper function for inputting data in two different bit-depths.
Definition: mlpenc.c:1224
PNGEncContext::bytestream
uint8_t * bytestream
Definition: pngenc.c:59
PNGEncContext::is_progressive
int is_progressive
Definition: pngenc.c:70
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
VE
#define VE
Definition: pngenc.c:1264
ff_alloc_packet
int ff_alloc_packet(AVCodecContext *avctx, AVPacket *avpkt, int64_t size)
Check AVPacket size and allocate data.
Definition: encode.c:60
encode_headers
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:376
APNGFctlChunk::dispose_op
uint8_t dispose_op
Definition: pngenc.c:52
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown). That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:616
PNGEncContext::palette_checksum
uint32_t palette_checksum
Definition: pngenc.c:76
PNG_COLOR_TYPE_PALETTE
#define PNG_COLOR_TYPE_PALETTE
Definition: png.h:34
src
#define src
Definition: vp8dsp.c:248
APNGFctlChunk::x_offset
uint32_t x_offset
Definition: pngenc.c:50
ff_deflate_init
int ff_deflate_init(FFZStream *zstream, int level, void *logctx)
Wrapper around deflateInit().
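A hedged sketch of one-shot compression through the wrapper, assuming FFZStream exposes the underlying z_stream as its zstream member and that ff_deflate_end() is the matching cleanup declared in zlib_wrapper.h:
#include <stdint.h>
#include <zlib.h>
#include "zlib_wrapper.h"
#include "libavutil/error.h"

static int compress_block(void *logctx, uint8_t *dst, unsigned dst_len,
                          const uint8_t *src, unsigned src_len)
{
    FFZStream z = { 0 };
    int ret = ff_deflate_init(&z, Z_DEFAULT_COMPRESSION, logctx);
    if (ret < 0)
        return ret;
    z.zstream.next_in   = (uint8_t *)src;  /* zlib's next_in is not const-qualified */
    z.zstream.avail_in  = src_len;
    z.zstream.next_out  = dst;
    z.zstream.avail_out = dst_len;
    ret = deflate(&z.zstream, Z_FINISH) == Z_STREAM_END ? 0 : AVERROR_EXTERNAL;
    ff_deflate_end(&z); /* assumed cleanup counterpart */
    return ret;
}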
PNGEncContext::sequence_number
uint32_t sequence_number
Definition: pngenc.c:77
AVCodecContext::compression_level
int compression_level
Definition: avcodec.h:1223
pngenc_class
static const AVClass pngenc_class
Definition: pngenc.c:1278