FFmpeg
pngenc.c
1 /*
2  * PNG image format
3  * Copyright (c) 2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include "avcodec.h"
23 #include "codec_internal.h"
24 #include "encode.h"
25 #include "bytestream.h"
26 #include "lossless_videoencdsp.h"
27 #include "png.h"
28 #include "apng.h"
29 #include "zlib_wrapper.h"
30 
31 #include "libavutil/avassert.h"
32 #include "libavutil/crc.h"
33 #include "libavutil/csp.h"
34 #include "libavutil/libm.h"
35 #include "libavutil/mastering_display_metadata.h"
36 #include "libavutil/mem.h"
37 #include "libavutil/opt.h"
38 #include "libavutil/rational.h"
39 #include "libavutil/stereo3d.h"
40 
41 #include <zlib.h>
42 
43 #define IOBUF_SIZE 4096
44 
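/* Mirrors the fields of an APNG fcTL (frame control) chunk; the encoder
 * fills one of these per frame while deciding how the frame is to be
 * disposed of and blended. */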
45 typedef struct APNGFctlChunk {
46  uint32_t sequence_number;
47  uint32_t width, height;
48  uint32_t x_offset, y_offset;
49  uint16_t delay_num, delay_den;
50  uint8_t dispose_op, blend_op;
51 } APNGFctlChunk;
52 
53 typedef struct PNGEncContext {
54  AVClass *class;
55  LLVidEncDSPContext llvidencdsp;
56 
57  uint8_t *bytestream;
58  uint8_t *bytestream_start;
59  uint8_t *bytestream_end;
60 
61  int filter_type;
62 
63  FFZStream zstream;
64  uint8_t buf[IOBUF_SIZE];
65  int dpi; ///< Physical pixel density, in dots per inch, if set
66  int dpm; ///< Physical pixel density, in dots per meter, if set
67 
68  int is_progressive;
69  int bit_depth;
70  int color_type;
71  int bits_per_pixel;
72 
73  // APNG
74  uint32_t palette_checksum; // Used to ensure a single unique palette
75  uint32_t sequence_number;
76  int extra_data_updated;
77  uint8_t *extra_data;
78  int extra_data_size;
79 
80  AVFrame *prev_frame;
81  AVFrame *last_frame;
82  APNGFctlChunk last_frame_fctl;
83  uint8_t *last_frame_packet;
84  size_t last_frame_packet_size;
85 } PNGEncContext;
86 
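/* Extract the pixels belonging to one Adam7 interlacing pass from a full
 * row. Each entry in masks[] marks, within every group of 8 columns,
 * which pixels are part of the given pass. */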
87 static void png_get_interlaced_row(uint8_t *dst, int row_size,
88  int bits_per_pixel, int pass,
89  const uint8_t *src, int width)
90 {
91  int x, mask, dst_x, j, b, bpp;
92  uint8_t *d;
93  const uint8_t *s;
94  static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};
95 
96  mask = masks[pass];
97  switch (bits_per_pixel) {
98  case 1:
99  memset(dst, 0, row_size);
100  dst_x = 0;
101  for (x = 0; x < width; x++) {
102  j = (x & 7);
103  if ((mask << j) & 0x80) {
104  b = (src[x >> 3] >> (7 - j)) & 1;
105  dst[dst_x >> 3] |= b << (7 - (dst_x & 7));
106  dst_x++;
107  }
108  }
109  break;
110  default:
111  bpp = bits_per_pixel >> 3;
112  d = dst;
113  s = src;
114  for (x = 0; x < width; x++) {
115  j = x & 7;
116  if ((mask << j) & 0x80) {
117  memcpy(d, s, bpp);
118  d += bpp;
119  }
120  s += bpp;
121  }
122  break;
123  }
124 }
125 
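/* Paeth filter (PNG filter type 4): predict each byte from its left (a),
 * above (b) and upper-left (c) neighbours, picking whichever is closest
 * to a + b - c, and store the difference. */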
126 static void sub_png_paeth_prediction(uint8_t *dst, const uint8_t *src, const uint8_t *top,
127  int w, int bpp)
128 {
129  int i;
130  for (i = 0; i < w; i++) {
131  int a, b, c, p, pa, pb, pc;
132 
133  a = src[i - bpp];
134  b = top[i];
135  c = top[i - bpp];
136 
137  p = b - c;
138  pc = a - c;
139 
140  pa = abs(p);
141  pb = abs(pc);
142  pc = abs(p + pc);
143 
144  if (pa <= pb && pa <= pc)
145  p = a;
146  else if (pb <= pc)
147  p = b;
148  else
149  p = c;
150  dst[i] = src[i] - p;
151  }
152 }
153 
154 static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
155 {
156  const uint8_t *src1 = src + bpp;
157  const uint8_t *src2 = src;
158  int x, unaligned_w;
159 
160  memcpy(dst, src, bpp);
161  dst += bpp;
162  size -= bpp;
163  unaligned_w = FFMIN(32 - bpp, size);
164  for (x = 0; x < unaligned_w; x++)
165  *dst++ = *src1++ - *src2++;
166  size -= unaligned_w;
167  c->llvidencdsp.diff_bytes(dst, src1, src2, size);
168 }
169 
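/* Apply one PNG scanline filter (None/Sub/Up/Average/Paeth) to a row and
 * write the filtered bytes to dst. */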
170 static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
171  const uint8_t *src, const uint8_t *top, int size, int bpp)
172 {
173  int i;
174 
175  switch (filter_type) {
176  case PNG_FILTER_VALUE_NONE:
177  memcpy(dst, src, size);
178  break;
179  case PNG_FILTER_VALUE_SUB:
180  sub_left_prediction(c, dst, src, bpp, size);
181  break;
182  case PNG_FILTER_VALUE_UP:
183  c->llvidencdsp.diff_bytes(dst, src, top, size);
184  break;
185  case PNG_FILTER_VALUE_AVG:
186  for (i = 0; i < bpp; i++)
187  dst[i] = src[i] - (top[i] >> 1);
188  for (; i < size; i++)
189  dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
190  break;
191  case PNG_FILTER_VALUE_PAETH:
192  for (i = 0; i < bpp; i++)
193  dst[i] = src[i] - top[i];
194  sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
195  break;
196  }
197 }
198 
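/* Filter one row and prepend the filter-type byte. With the "mixed"
 * setting every filter is tried and the one with the smallest sum of
 * absolute filtered values is kept, a common heuristic for picking the
 * most compressible variant. */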
199 static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
200  const uint8_t *src, const uint8_t *top, int size, int bpp)
201 {
202  int pred = s->filter_type;
203  av_assert0(bpp || !pred);
204  if (!top && pred)
205  pred = PNG_FILTER_VALUE_SUB;
206  if (pred == PNG_FILTER_VALUE_MIXED) {
207  int i;
208  int cost, bcost = INT_MAX;
209  uint8_t *buf1 = dst, *buf2 = dst + size + 16;
210  for (pred = 0; pred < 5; pred++) {
211  png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
212  buf1[0] = pred;
213  cost = 0;
214  for (i = 0; i <= size; i++)
215  cost += abs((int8_t) buf1[i]);
216  if (cost < bcost) {
217  bcost = cost;
218  FFSWAP(uint8_t *, buf1, buf2);
219  }
220  }
221  return buf2;
222  } else {
223  png_filter_row(s, dst + 1, pred, src, top, size, bpp);
224  dst[0] = pred;
225  return dst;
226  }
227 }
228 
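/* Write one PNG chunk: 4-byte big-endian length, 4-byte type, payload,
 * and a CRC-32 computed over the type and payload. */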
229 static void png_write_chunk(uint8_t **f, uint32_t tag,
230  const uint8_t *buf, int length)
231 {
232  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
233  uint32_t crc = ~0U;
234  uint8_t tagbuf[4];
235 
236  bytestream_put_be32(f, length);
237  AV_WL32(tagbuf, tag);
238  crc = av_crc(crc_table, crc, tagbuf, 4);
239  bytestream_put_be32(f, av_bswap32(tag));
240  if (length > 0) {
241  crc = av_crc(crc_table, crc, buf, length);
242  if (*f != buf)
243  memcpy(*f, buf, length);
244  *f += length;
245  }
246  bytestream_put_be32(f, ~crc);
247 }
248 
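/* Write compressed image data: as an IDAT chunk for plain PNG (or the
 * first APNG frame), otherwise as an fdAT chunk whose payload is
 * prefixed with a 4-byte sequence number. */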
249 static void png_write_image_data(AVCodecContext *avctx,
250  const uint8_t *buf, int length)
251 {
252  PNGEncContext *s = avctx->priv_data;
253  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
254  uint32_t crc = ~0U;
255 
256  if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_num == 0) {
257  png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
258  return;
259  }
260 
261  bytestream_put_be32(&s->bytestream, length + 4);
262 
263  bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
264  bytestream_put_be32(&s->bytestream, s->sequence_number);
265  crc = av_crc(crc_table, crc, s->bytestream - 8, 8);
266 
267  crc = av_crc(crc_table, crc, buf, length);
268  memcpy(s->bytestream, buf, length);
269  s->bytestream += length;
270 
271  bytestream_put_be32(&s->bytestream, ~crc);
272 
273  ++s->sequence_number;
274 }
275 
276 /* XXX: do filtering */
277 static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
278 {
279  PNGEncContext *s = avctx->priv_data;
280  z_stream *const zstream = &s->zstream.zstream;
281  int ret;
282 
283  zstream->avail_in = size;
284  zstream->next_in = data;
285  while (zstream->avail_in > 0) {
286  ret = deflate(zstream, Z_NO_FLUSH);
287  if (ret != Z_OK)
288  return -1;
289  if (zstream->avail_out == 0) {
290  if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
291  png_write_image_data(avctx, s->buf, IOBUF_SIZE);
292  zstream->avail_out = IOBUF_SIZE;
293  zstream->next_out = s->buf;
294  }
295  }
296  return 0;
297 }
298 
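/* cHRM and gAMA store their values as integers scaled by 100000, so the
 * rational/floating-point values are converted with lrint(). */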
299 #define PNG_LRINT(d, divisor) lrint((d) * (divisor))
300 #define PNG_Q2D(q, divisor) PNG_LRINT(av_q2d(q), (divisor))
301 #define AV_WB32_PNG_D(buf, q) AV_WB32(buf, PNG_Q2D(q, 100000))
302 static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
303 {
304  const AVColorPrimariesDesc *desc = av_csp_primaries_desc_from_id(prim);
305  if (!desc)
306  return 0;
307 
308  AV_WB32_PNG_D(buf, desc->wp.x);
309  AV_WB32_PNG_D(buf + 4, desc->wp.y);
310  AV_WB32_PNG_D(buf + 8, desc->prim.r.x);
311  AV_WB32_PNG_D(buf + 12, desc->prim.r.y);
312  AV_WB32_PNG_D(buf + 16, desc->prim.g.x);
313  AV_WB32_PNG_D(buf + 20, desc->prim.g.y);
314  AV_WB32_PNG_D(buf + 24, desc->prim.b.x);
315  AV_WB32_PNG_D(buf + 28, desc->prim.b.y);
316 
317  return 1;
318 }
319 
320 static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
321 {
322  double gamma = av_csp_approximate_trc_gamma(trc);
323  if (gamma <= 1e-6)
324  return 0;
325 
326  AV_WB32(buf, PNG_LRINT(1.0 / gamma, 100000));
327  return 1;
328 }
329 
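/* Write an iCCP chunk: profile name (truncated to 79 bytes), a NUL,
 * compression method 0, then the zlib-compressed ICC profile. */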
330 static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
331 {
332  z_stream *const zstream = &s->zstream.zstream;
333  const AVDictionaryEntry *entry;
334  const char *name;
335  uint8_t *start, *buf;
336  int ret;
337 
338  if (!sd || !sd->size)
339  return 0;
340  zstream->next_in = sd->data;
341  zstream->avail_in = sd->size;
342 
343  /* write the chunk contents first */
344  start = s->bytestream + 8; /* make room for iCCP tag + length */
345  buf = start;
346 
347  /* profile description */
348  entry = av_dict_get(sd->metadata, "name", NULL, 0);
349  name = (entry && entry->value[0]) ? entry->value : "icc";
350  for (int i = 0;; i++) {
351  char c = (i == 79) ? 0 : name[i];
352  bytestream_put_byte(&buf, c);
353  if (!c)
354  break;
355  }
356 
357  /* compression method and profile data */
358  bytestream_put_byte(&buf, 0);
359  zstream->next_out = buf;
360  zstream->avail_out = s->bytestream_end - buf;
361  ret = deflate(zstream, Z_FINISH);
362  deflateReset(zstream);
363  if (ret != Z_STREAM_END)
364  return AVERROR_EXTERNAL;
365 
366  /* rewind to the start and write the chunk header/crc */
367  png_write_chunk(&s->bytestream, MKTAG('i', 'C', 'C', 'P'), start,
368  zstream->next_out - start);
369  return 0;
370 }
371 
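/* Write IHDR and the ancillary chunks (pHYs, sTER, iCCP, sRGB/cICP,
 * cLLi, mDCv, cHRM, gAMA, sBIT and PLTE/tRNS) derived from the frame and
 * its side data. */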
372 static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
373 {
374  AVFrameSideData *side_data;
375  PNGEncContext *s = avctx->priv_data;
376  int ret;
377 
378  /* write png header */
379  AV_WB32(s->buf, avctx->width);
380  AV_WB32(s->buf + 4, avctx->height);
381  s->buf[8] = s->bit_depth;
382  s->buf[9] = s->color_type;
383  s->buf[10] = 0; /* compression type */
384  s->buf[11] = 0; /* filter type */
385  s->buf[12] = s->is_progressive; /* interlace type */
386  png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);
387 
388  /* write physical information */
389  if (s->dpm) {
390  AV_WB32(s->buf, s->dpm);
391  AV_WB32(s->buf + 4, s->dpm);
392  s->buf[8] = 1; /* unit specifier is meter */
393  } else {
394  AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
395  AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
396  s->buf[8] = 0; /* unit specifier is unknown */
397  }
398  png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);
399 
400  /* write stereoscopic information */
401  side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_STEREO3D);
402  if (side_data) {
403  AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
404  switch (stereo3d->type) {
405  case AV_STEREO3D_SIDEBYSIDE:
406  s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
407  png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
408  break;
409  case AV_STEREO3D_2D:
410  break;
411  default:
412  av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
413  break;
414  }
415  }
416 
417  side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
418  if ((ret = png_write_iccp(s, side_data)))
419  return ret;
420 
421  /* write colorspace information */
422  if (pict->color_primaries == AVCOL_PRI_BT709 &&
423  pict->color_trc == AVCOL_TRC_IEC61966_2_1) {
424  s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
425  png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
426  } else if (pict->color_trc != AVCOL_TRC_UNSPECIFIED && !side_data) {
427  /*
428  * Avoid writing cICP if the transfer is unknown. Known primaries
429  * with unknown transfer can be handled by cHRM.
430  *
431  * We also avoid writing cICP if an ICC Profile is present, because
432  * the standard requires that cICP overrides iCCP.
433  *
434  * These values match H.273 so no translation is needed.
435  */
436  s->buf[0] = pict->color_primaries;
437  s->buf[1] = pict->color_trc;
438  s->buf[2] = 0; /* colorspace = RGB */
439  s->buf[3] = pict->color_range == AVCOL_RANGE_MPEG ? 0 : 1;
440  png_write_chunk(&s->bytestream, MKTAG('c', 'I', 'C', 'P'), s->buf, 4);
441  }
442 
443  side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
444  if (side_data) {
445  AVContentLightMetadata *clli = (AVContentLightMetadata *) side_data->data;
446  AV_WB32(s->buf, clli->MaxCLL * 10000);
447  AV_WB32(s->buf + 4, clli->MaxFALL * 10000);
448  png_write_chunk(&s->bytestream, MKTAG('c', 'L', 'L', 'i'), s->buf, 8);
449  }
450 
451  side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
452  if (side_data) {
453  AVMasteringDisplayMetadata *mdcv = (AVMasteringDisplayMetadata *) side_data->data;
454  if (mdcv->has_luminance && mdcv->has_primaries) {
455  for (int i = 0; i < 3; i++) {
456  AV_WB16(s->buf + 4*i, PNG_Q2D(mdcv->display_primaries[i][0], 50000));
457  AV_WB16(s->buf + 4*i + 2, PNG_Q2D(mdcv->display_primaries[i][1], 50000));
458  }
459  AV_WB16(s->buf + 12, PNG_Q2D(mdcv->white_point[0], 50000));
460  AV_WB16(s->buf + 14, PNG_Q2D(mdcv->white_point[1], 50000));
461  AV_WB32(s->buf + 16, PNG_Q2D(mdcv->max_luminance, 10000));
462  AV_WB32(s->buf + 20, PNG_Q2D(mdcv->min_luminance, 10000));
463  png_write_chunk(&s->bytestream, MKTAG('m', 'D', 'C', 'v'), s->buf, 24);
464  }
465  }
466 
467  if (png_get_chrm(pict->color_primaries, s->buf))
468  png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
469  if (png_get_gama(pict->color_trc, s->buf))
470  png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);
471 
472  if (avctx->bits_per_raw_sample > 0 &&
473  avctx->bits_per_raw_sample < (s->color_type & PNG_COLOR_MASK_PALETTE ? 8 : s->bit_depth)) {
474  int len = s->color_type & PNG_COLOR_MASK_PALETTE ? 3 : ff_png_get_nb_channels(s->color_type);
475  memset(s->buf, avctx->bits_per_raw_sample, len);
476  png_write_chunk(&s->bytestream, MKTAG('s', 'B', 'I', 'T'), s->buf, len);
477  }
478 
479  /* put the palette if needed, must be after colorspace information */
480  if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
481  int has_alpha, alpha, i;
482  unsigned int v;
483  uint32_t *palette;
484  uint8_t *ptr, *alpha_ptr;
485 
486  palette = (uint32_t *)pict->data[1];
487  ptr = s->buf;
488  alpha_ptr = s->buf + 256 * 3;
489  has_alpha = 0;
490  for (i = 0; i < 256; i++) {
491  v = palette[i];
492  alpha = v >> 24;
493  if (alpha != 0xff)
494  has_alpha = 1;
495  *alpha_ptr++ = alpha;
496  bytestream_put_be24(&ptr, v);
497  }
498  png_write_chunk(&s->bytestream,
499  MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
500  if (has_alpha) {
501  png_write_chunk(&s->bytestream,
502  MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
503  }
504  }
505 
506  return 0;
507 }
508 
509 static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
510 {
511  PNGEncContext *s = avctx->priv_data;
512  z_stream *const zstream = &s->zstream.zstream;
513  const AVFrame *const p = pict;
514  int y, len, ret;
515  int row_size, pass_row_size;
516  uint8_t *crow_buf, *crow;
517  uint8_t *crow_base = NULL;
518  uint8_t *progressive_buf = NULL;
519  uint8_t *top_buf = NULL;
520 
521  row_size = (pict->width * s->bits_per_pixel + 7) >> 3;
522 
523  crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
524  if (!crow_base) {
525  ret = AVERROR(ENOMEM);
526  goto the_end;
527  }
528  // pixel data should be aligned, but there's a control byte before it
529  crow_buf = crow_base + 15;
530  if (s->is_progressive) {
531  progressive_buf = av_malloc(row_size + 1);
532  top_buf = av_malloc(row_size + 1);
533  if (!progressive_buf || !top_buf) {
534  ret = AVERROR(ENOMEM);
535  goto the_end;
536  }
537  }
538 
539  /* put each row */
540  zstream->avail_out = IOBUF_SIZE;
541  zstream->next_out = s->buf;
542  if (s->is_progressive) {
543  int pass;
544 
545  for (pass = 0; pass < NB_PASSES; pass++) {
546  /* NOTE: a pass is completely omitted if no pixels would be
547  * output */
548  pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
549  if (pass_row_size > 0) {
550  uint8_t *top = NULL;
551  for (y = 0; y < pict->height; y++)
552  if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
553  const uint8_t *ptr = p->data[0] + y * p->linesize[0];
554  FFSWAP(uint8_t *, progressive_buf, top_buf);
555  png_get_interlaced_row(progressive_buf, pass_row_size,
556  s->bits_per_pixel, pass,
557  ptr, pict->width);
558  crow = png_choose_filter(s, crow_buf, progressive_buf,
559  top, pass_row_size, s->bits_per_pixel >> 3);
560  png_write_row(avctx, crow, pass_row_size + 1);
561  top = progressive_buf;
562  }
563  }
564  }
565  } else {
566  const uint8_t *top = NULL;
567  for (y = 0; y < pict->height; y++) {
568  const uint8_t *ptr = p->data[0] + y * p->linesize[0];
569  crow = png_choose_filter(s, crow_buf, ptr, top,
570  row_size, s->bits_per_pixel >> 3);
571  png_write_row(avctx, crow, row_size + 1);
572  top = ptr;
573  }
574  }
575  /* compress last bytes */
576  for (;;) {
577  ret = deflate(zstream, Z_FINISH);
578  if (ret == Z_OK || ret == Z_STREAM_END) {
579  len = IOBUF_SIZE - zstream->avail_out;
580  if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
581  png_write_image_data(avctx, s->buf, len);
582  }
583  zstream->avail_out = IOBUF_SIZE;
584  zstream->next_out = s->buf;
585  if (ret == Z_STREAM_END)
586  break;
587  } else {
588  ret = -1;
589  goto the_end;
590  }
591  }
592 
593  ret = 0;
594 
595 the_end:
596  av_freep(&crow_base);
597  av_freep(&progressive_buf);
598  av_freep(&top_buf);
599  deflateReset(zstream);
600  return ret;
601 }
602 
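/* Enlarge the packet size estimate by the worst case for an iCCP chunk:
 * deflateBound() of the profile plus a fixed allowance for the chunk
 * header and profile name. */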
603 static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict,
604  uint64_t *max_packet_size)
605 {
606  PNGEncContext *s = avctx->priv_data;
607  const AVFrameSideData *sd;
608  const int hdr_size = 128;
609  uint64_t new_pkt_size;
610  uLong bound;
611 
612  if (!pict)
613  return 0;
614  sd = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
615  if (!sd || !sd->size)
616  return 0;
617  if (sd->size != (uLong) sd->size)
618  return AVERROR_INVALIDDATA;
619 
620  bound = deflateBound(&s->zstream.zstream, sd->size);
621  if (bound > INT32_MAX - hdr_size)
622  return AVERROR_INVALIDDATA;
623 
624  new_pkt_size = *max_packet_size + bound + hdr_size;
625  if (new_pkt_size < *max_packet_size)
626  return AVERROR_INVALIDDATA;
627  *max_packet_size = new_pkt_size;
628  return 0;
629 }
630 
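/* Encode a single PNG image: size the packet from deflateBound() per row
 * plus 12 bytes of IDAT chunk overhead per IOBUF_SIZE block, then write
 * the signature, the headers, the image data and IEND. */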
631 static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
632  const AVFrame *pict, int *got_packet)
633 {
634  PNGEncContext *s = avctx->priv_data;
635  int ret;
636  int enc_row_size;
637  uint64_t max_packet_size;
638 
639  enc_row_size = deflateBound(&s->zstream.zstream,
640  (avctx->width * s->bits_per_pixel + 7) >> 3);
641  max_packet_size =
642  FF_INPUT_BUFFER_MIN_SIZE + // headers
643  avctx->height * (
644  enc_row_size +
645  12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
646  );
647  if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
648  return ret;
649  ret = ff_alloc_packet(avctx, pkt, max_packet_size);
650  if (ret < 0)
651  return ret;
652 
653  s->bytestream_start =
654  s->bytestream = pkt->data;
655  s->bytestream_end = pkt->data + pkt->size;
656 
657  AV_WB64(s->bytestream, PNGSIG);
658  s->bytestream += 8;
659 
660  ret = encode_headers(avctx, pict);
661  if (ret < 0)
662  return ret;
663 
664  ret = encode_frame(avctx, pict);
665  if (ret < 0)
666  return ret;
667 
668  png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);
669 
670  pkt->size = s->bytestream - s->bytestream_start;
671  pkt->flags |= AV_PKT_FLAG_KEY;
672  *got_packet = 1;
673 
674  return 0;
675 }
676 
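/* Given the previous output (background) and the new frame (foreground),
 * compute the smallest sub-rectangle and pixel data that reproduce the
 * foreground when blended onto the background, updating fctl_chunk with
 * the resulting geometry. Returns a negative value if the requested
 * blend mode cannot represent the change. */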
677 static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input,
678  APNGFctlChunk *fctl_chunk, uint8_t bpp)
679 {
680  // output: background, input: foreground
681  // output the image such that when blended with the background, will produce the foreground
682 
683  unsigned int x, y;
684  unsigned int leftmost_x = input->width;
685  unsigned int rightmost_x = 0;
686  unsigned int topmost_y = input->height;
687  unsigned int bottommost_y = 0;
688  const uint8_t *input_data = input->data[0];
689  uint8_t *output_data = output->data[0];
690  ptrdiff_t input_linesize = input->linesize[0];
691  ptrdiff_t output_linesize = output->linesize[0];
692 
693  // Find bounding box of changes
694  for (y = 0; y < input->height; ++y) {
695  for (x = 0; x < input->width; ++x) {
696  if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
697  continue;
698 
699  if (x < leftmost_x)
700  leftmost_x = x;
701  if (x >= rightmost_x)
702  rightmost_x = x + 1;
703  if (y < topmost_y)
704  topmost_y = y;
705  if (y >= bottommost_y)
706  bottommost_y = y + 1;
707  }
708 
709  input_data += input_linesize;
710  output_data += output_linesize;
711  }
712 
713  if (leftmost_x == input->width && rightmost_x == 0) {
714  // Empty frame
715  // APNG does not support empty frames, so we make it a 1x1 frame
716  leftmost_x = topmost_y = 0;
717  rightmost_x = bottommost_y = 1;
718  }
719 
720  // Do actual inverse blending
721  if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
722  output_data = output->data[0];
723  for (y = topmost_y; y < bottommost_y; ++y) {
724  memcpy(output_data,
725  input->data[0] + input_linesize * y + bpp * leftmost_x,
726  bpp * (rightmost_x - leftmost_x));
727  output_data += output_linesize;
728  }
729  } else { // APNG_BLEND_OP_OVER
730  size_t transparent_palette_index;
731  uint32_t *palette;
732 
733  switch (input->format) {
734  case AV_PIX_FMT_RGBA64BE:
735  case AV_PIX_FMT_YA16BE:
736  case AV_PIX_FMT_RGBA:
737  case AV_PIX_FMT_GRAY8A:
738  break;
739 
740  case AV_PIX_FMT_PAL8:
741  palette = (uint32_t*)input->data[1];
742  for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
743  if (palette[transparent_palette_index] >> 24 == 0)
744  break;
745  break;
746 
747  default:
748  // No alpha, so blending not possible
749  return -1;
750  }
751 
752  for (y = topmost_y; y < bottommost_y; ++y) {
753  const uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
754  uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
755  output_data = output->data[0] + output_linesize * (y - topmost_y);
756  for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
757  if (!memcmp(foreground, background, bpp)) {
758  if (input->format == AV_PIX_FMT_PAL8) {
759  if (transparent_palette_index == 256) {
760  // Need fully transparent colour, but none exists
761  return -1;
762  }
763 
764  *output_data = transparent_palette_index;
765  } else {
766  memset(output_data, 0, bpp);
767  }
768  continue;
769  }
770 
771  // Check for special alpha values, since full inverse
772  // alpha-on-alpha blending is rarely possible, and when
773  // possible, doesn't compress much better than
774  // APNG_BLEND_OP_SOURCE blending
775  switch (input->format) {
776  case AV_PIX_FMT_RGBA64BE:
777  if (((uint16_t*)foreground)[3] == 0xffff ||
778  ((uint16_t*)background)[3] == 0)
779  break;
780  return -1;
781 
782  case AV_PIX_FMT_YA16BE:
783  if (((uint16_t*)foreground)[1] == 0xffff ||
784  ((uint16_t*)background)[1] == 0)
785  break;
786  return -1;
787 
788  case AV_PIX_FMT_RGBA:
789  if (foreground[3] == 0xff || background[3] == 0)
790  break;
791  return -1;
792 
793  case AV_PIX_FMT_GRAY8A:
794  if (foreground[1] == 0xff || background[1] == 0)
795  break;
796  return -1;
797 
798  case AV_PIX_FMT_PAL8:
799  if (palette[*foreground] >> 24 == 0xff ||
800  palette[*background] >> 24 == 0)
801  break;
802  return -1;
803  }
804 
805  memmove(output_data, foreground, bpp);
806  }
807  }
808  }
809 
810  output->width = rightmost_x - leftmost_x;
811  output->height = bottommost_y - topmost_y;
812  fctl_chunk->width = output->width;
813  fctl_chunk->height = output->height;
814  fctl_chunk->x_offset = leftmost_x;
815  fctl_chunk->y_offset = topmost_y;
816 
817  return 0;
818 }
819 
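/* Try every dispose_op for the previous frame combined with every
 * blend_op for the current frame, encode each candidate, and keep the
 * smallest resulting bitstream. The first frame is encoded as-is. */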
820 static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
821  APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
822 {
823  PNGEncContext *s = avctx->priv_data;
824  int ret;
825  unsigned int y;
826  AVFrame* diffFrame;
827  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
828  uint8_t *original_bytestream, *original_bytestream_end;
829  uint8_t *temp_bytestream = 0, *temp_bytestream_end;
830  uint32_t best_sequence_number;
831  uint8_t *best_bytestream;
832  size_t best_bytestream_size = SIZE_MAX;
833  APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
834  APNGFctlChunk fctl_chunk = *best_fctl_chunk;
835 
836  if (avctx->frame_num == 0) {
837  best_fctl_chunk->width = pict->width;
838  best_fctl_chunk->height = pict->height;
839  best_fctl_chunk->x_offset = 0;
840  best_fctl_chunk->y_offset = 0;
841  best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
842  return encode_frame(avctx, pict);
843  }
844 
845  diffFrame = av_frame_alloc();
846  if (!diffFrame)
847  return AVERROR(ENOMEM);
848 
849  diffFrame->format = pict->format;
850  diffFrame->width = pict->width;
851  diffFrame->height = pict->height;
852  if ((ret = av_frame_get_buffer(diffFrame, 0)) < 0)
853  goto fail;
854 
855  original_bytestream = s->bytestream;
856  original_bytestream_end = s->bytestream_end;
857 
858  temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
859  if (!temp_bytestream) {
860  ret = AVERROR(ENOMEM);
861  goto fail;
862  }
863  temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);
864 
865  for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
866  // 0: APNG_DISPOSE_OP_NONE
867  // 1: APNG_DISPOSE_OP_BACKGROUND
868  // 2: APNG_DISPOSE_OP_PREVIOUS
869 
870  for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
871  // 0: APNG_BLEND_OP_SOURCE
872  // 1: APNG_BLEND_OP_OVER
873 
874  uint32_t original_sequence_number = s->sequence_number, sequence_number;
875  uint8_t *bytestream_start = s->bytestream;
876  size_t bytestream_size;
877 
878  // Do disposal
879  if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
880  diffFrame->width = pict->width;
881  diffFrame->height = pict->height;
882  ret = av_frame_copy(diffFrame, s->last_frame);
883  if (ret < 0)
884  goto fail;
885 
886  if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
887  for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
888  size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
889  memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
890  }
891  }
892  } else {
893  if (!s->prev_frame)
894  continue;
895 
896  diffFrame->width = pict->width;
897  diffFrame->height = pict->height;
898  ret = av_frame_copy(diffFrame, s->prev_frame);
899  if (ret < 0)
900  goto fail;
901  }
902 
903  // Do inverse blending
904  if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
905  continue;
906 
907  // Do encoding
908  ret = encode_frame(avctx, diffFrame);
909  sequence_number = s->sequence_number;
910  s->sequence_number = original_sequence_number;
911  bytestream_size = s->bytestream - bytestream_start;
912  s->bytestream = bytestream_start;
913  if (ret < 0)
914  goto fail;
915 
916  if (bytestream_size < best_bytestream_size) {
917  *best_fctl_chunk = fctl_chunk;
918  *best_last_fctl_chunk = last_fctl_chunk;
919 
920  best_sequence_number = sequence_number;
921  best_bytestream = s->bytestream;
922  best_bytestream_size = bytestream_size;
923 
924  if (best_bytestream == original_bytestream) {
925  s->bytestream = temp_bytestream;
926  s->bytestream_end = temp_bytestream_end;
927  } else {
928  s->bytestream = original_bytestream;
929  s->bytestream_end = original_bytestream_end;
930  }
931  }
932  }
933  }
934 
935  s->sequence_number = best_sequence_number;
936  s->bytestream = original_bytestream + best_bytestream_size;
937  s->bytestream_end = original_bytestream_end;
938  if (best_bytestream != original_bytestream)
939  memcpy(original_bytestream, best_bytestream, best_bytestream_size);
940 
941  ret = 0;
942 
943 fail:
944  av_freep(&temp_bytestream);
945  av_frame_free(&diffFrame);
946  return ret;
947 }
948 
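/* APNG packets are emitted with one frame of latency: the packet for the
 * previous frame is completed (its fcTL written) only once the next
 * frame is known, since the best dispose/blend choice depends on it.
 * The frame delay itself is filled in by the muxer. */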
949 static int encode_apng(AVCodecContext *avctx, AVPacket *pkt,
950  const AVFrame *pict, int *got_packet)
951 {
952  PNGEncContext *s = avctx->priv_data;
953  int ret;
954  int enc_row_size;
955  uint64_t max_packet_size;
956  APNGFctlChunk fctl_chunk = {0};
957 
958  if (pict && s->color_type == PNG_COLOR_TYPE_PALETTE) {
959  uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));
960 
961  if (avctx->frame_num == 0) {
962  s->palette_checksum = checksum;
963  } else if (checksum != s->palette_checksum) {
964  av_log(avctx, AV_LOG_ERROR,
965  "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
966  return -1;
967  }
968  }
969 
970  enc_row_size = deflateBound(&s->zstream.zstream,
971  (avctx->width * s->bits_per_pixel + 7) >> 3);
972  max_packet_size =
973  FF_INPUT_BUFFER_MIN_SIZE + // headers
974  avctx->height * (
975  enc_row_size +
976  (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
977  );
978  if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
979  return ret;
980  if (max_packet_size > INT_MAX)
981  return AVERROR(ENOMEM);
982 
983  if (avctx->frame_num == 0) {
984  if (!pict)
985  return AVERROR(EINVAL);
986 
987  s->bytestream = s->extra_data = av_malloc(FF_INPUT_BUFFER_MIN_SIZE);
988  if (!s->extra_data)
989  return AVERROR(ENOMEM);
990 
991  ret = encode_headers(avctx, pict);
992  if (ret < 0)
993  return ret;
994 
995  s->extra_data_size = s->bytestream - s->extra_data;
996 
997  s->last_frame_packet = av_malloc(max_packet_size);
998  if (!s->last_frame_packet)
999  return AVERROR(ENOMEM);
1000  } else if (s->last_frame) {
1001  ret = ff_get_encode_buffer(avctx, pkt, s->last_frame_packet_size, 0);
1002  if (ret < 0)
1003  return ret;
1004 
1005  memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
1006  pkt->pts = s->last_frame->pts;
1007  pkt->duration = s->last_frame->duration;
1008 
1009  ret = ff_encode_reordered_opaque(avctx, pkt, s->last_frame);
1010  if (ret < 0)
1011  return ret;
1012  }
1013 
1014  if (pict) {
1015  s->bytestream_start =
1016  s->bytestream = s->last_frame_packet;
1017  s->bytestream_end = s->bytestream + max_packet_size;
1018 
1019  // We're encoding the frame first, so we have to do a bit of shuffling around
1020  // to have the image data write to the correct place in the buffer
1021  fctl_chunk.sequence_number = s->sequence_number;
1022  ++s->sequence_number;
1023  s->bytestream += APNG_FCTL_CHUNK_SIZE + 12;
1024 
1025  ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
1026  if (ret < 0)
1027  return ret;
1028 
1029  fctl_chunk.delay_num = 0; // delay filled in during muxing
1030  fctl_chunk.delay_den = 0;
1031  } else {
1032  s->last_frame_fctl.dispose_op = APNG_DISPOSE_OP_NONE;
1033  }
1034 
1035  if (s->last_frame) {
1036  uint8_t* last_fctl_chunk_start = pkt->data;
1037  uint8_t buf[APNG_FCTL_CHUNK_SIZE];
1038  if (!s->extra_data_updated) {
1039  uint8_t *side_data = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, s->extra_data_size);
1040  if (!side_data)
1041  return AVERROR(ENOMEM);
1042  memcpy(side_data, s->extra_data, s->extra_data_size);
1043  s->extra_data_updated = 1;
1044  }
1045 
1046  AV_WB32(buf + 0, s->last_frame_fctl.sequence_number);
1047  AV_WB32(buf + 4, s->last_frame_fctl.width);
1048  AV_WB32(buf + 8, s->last_frame_fctl.height);
1049  AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
1050  AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
1051  AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
1052  AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
1053  buf[24] = s->last_frame_fctl.dispose_op;
1054  buf[25] = s->last_frame_fctl.blend_op;
1055  png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, sizeof(buf));
1056 
1057  *got_packet = 1;
1058  }
1059 
1060  if (pict) {
1061  if (!s->last_frame) {
1062  s->last_frame = av_frame_alloc();
1063  if (!s->last_frame)
1064  return AVERROR(ENOMEM);
1065  } else if (s->last_frame_fctl.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
1066  if (!s->prev_frame) {
1067  s->prev_frame = av_frame_alloc();
1068  if (!s->prev_frame)
1069  return AVERROR(ENOMEM);
1070 
1071  s->prev_frame->format = pict->format;
1072  s->prev_frame->width = pict->width;
1073  s->prev_frame->height = pict->height;
1074  if ((ret = av_frame_get_buffer(s->prev_frame, 0)) < 0)
1075  return ret;
1076  }
1077 
1078  // Do disposal, but not blending
1079  av_frame_copy(s->prev_frame, s->last_frame);
1080  if (s->last_frame_fctl.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
1081  uint32_t y;
1082  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
1083  for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
1084  size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
1085  memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
1086  }
1087  }
1088  }
1089 
1090  ret = av_frame_replace(s->last_frame, pict);
1091  if (ret < 0)
1092  return ret;
1093 
1094  s->last_frame_fctl = fctl_chunk;
1095  s->last_frame_packet_size = s->bytestream - s->bytestream_start;
1096  } else {
1097  av_frame_free(&s->last_frame);
1098  }
1099 
1100  return 0;
1101 }
1102 
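/* Map the input pixel format to PNG bit depth and colour type, validate
 * the dpi/dpm options and initialize the zlib deflate context. */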
1103 static av_cold int png_enc_init(AVCodecContext *avctx)
1104 {
1105  PNGEncContext *s = avctx->priv_data;
1106  int compression_level;
1107 
1108  switch (avctx->pix_fmt) {
1109  case AV_PIX_FMT_RGBA:
1110  avctx->bits_per_coded_sample = 32;
1111  break;
1112  case AV_PIX_FMT_RGB24:
1113  avctx->bits_per_coded_sample = 24;
1114  break;
1115  case AV_PIX_FMT_GRAY8:
1116  avctx->bits_per_coded_sample = 0x28;
1117  break;
1118  case AV_PIX_FMT_MONOBLACK:
1119  avctx->bits_per_coded_sample = 1;
1120  break;
1121  case AV_PIX_FMT_PAL8:
1122  avctx->bits_per_coded_sample = 8;
1123  }
1124 
1125  ff_llvidencdsp_init(&s->llvidencdsp);
1126 
1127  if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
1128  s->filter_type = PNG_FILTER_VALUE_NONE;
1129 
1130  if (s->dpi && s->dpm) {
1131  av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
1132  return AVERROR(EINVAL);
1133  } else if (s->dpi) {
1134  s->dpm = s->dpi * 10000 / 254;
1135  }
1136 
1137  s->is_progressive = !!(avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT);
1138  switch (avctx->pix_fmt) {
1139  case AV_PIX_FMT_RGBA64BE:
1140  s->bit_depth = 16;
1141  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1142  break;
1143  case AV_PIX_FMT_RGB48BE:
1144  s->bit_depth = 16;
1145  s->color_type = PNG_COLOR_TYPE_RGB;
1146  break;
1147  case AV_PIX_FMT_RGBA:
1148  s->bit_depth = 8;
1149  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1150  break;
1151  case AV_PIX_FMT_RGB24:
1152  s->bit_depth = 8;
1153  s->color_type = PNG_COLOR_TYPE_RGB;
1154  break;
1155  case AV_PIX_FMT_GRAY16BE:
1156  s->bit_depth = 16;
1157  s->color_type = PNG_COLOR_TYPE_GRAY;
1158  break;
1159  case AV_PIX_FMT_GRAY8:
1160  s->bit_depth = 8;
1161  s->color_type = PNG_COLOR_TYPE_GRAY;
1162  break;
1163  case AV_PIX_FMT_GRAY8A:
1164  s->bit_depth = 8;
1165  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1166  break;
1167  case AV_PIX_FMT_YA16BE:
1168  s->bit_depth = 16;
1169  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1170  break;
1171  case AV_PIX_FMT_MONOBLACK:
1172  s->bit_depth = 1;
1173  s->color_type = PNG_COLOR_TYPE_GRAY;
1174  break;
1175  case AV_PIX_FMT_PAL8:
1176  s->bit_depth = 8;
1177  s->color_type = PNG_COLOR_TYPE_PALETTE;
1178  break;
1179  default:
1180  return -1;
1181  }
1182  s->bits_per_pixel = ff_png_get_nb_channels(s->color_type) * s->bit_depth;
1183 
1184  compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
1185  ? Z_DEFAULT_COMPRESSION
1186  : av_clip(avctx->compression_level, 0, 9);
1187  return ff_deflate_init(&s->zstream, compression_level, avctx);
1188 }
1189 
1190 static av_cold int png_enc_close(AVCodecContext *avctx)
1191 {
1192  PNGEncContext *s = avctx->priv_data;
1193 
1194  ff_deflate_end(&s->zstream);
1195  av_frame_free(&s->last_frame);
1196  av_frame_free(&s->prev_frame);
1197  av_freep(&s->last_frame_packet);
1198  av_freep(&s->extra_data);
1199  s->extra_data_size = 0;
1200  return 0;
1201 }
1202 
1203 #define OFFSET(x) offsetof(PNGEncContext, x)
1204 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
1205 static const AVOption options[] = {
1206  {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1207  {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1208  { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_NONE }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, .unit = "pred" },
1209  { "none", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE }, INT_MIN, INT_MAX, VE, .unit = "pred" },
1210  { "sub", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB }, INT_MIN, INT_MAX, VE, .unit = "pred" },
1211  { "up", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP }, INT_MIN, INT_MAX, VE, .unit = "pred" },
1212  { "avg", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG }, INT_MIN, INT_MAX, VE, .unit = "pred" },
1213  { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, .unit = "pred" },
1214  { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, .unit = "pred" },
1215  { NULL},
1216 };
1217 
1218 static const AVClass pngenc_class = {
1219  .class_name = "(A)PNG encoder",
1220  .item_name = av_default_item_name,
1221  .option = options,
1222  .version = LIBAVUTIL_VERSION_INT,
1223 };
1224 
1225 const FFCodec ff_png_encoder = {
1226  .p.name = "png",
1227  CODEC_LONG_NAME("PNG (Portable Network Graphics) image"),
1228  .p.type = AVMEDIA_TYPE_VIDEO,
1229  .p.id = AV_CODEC_ID_PNG,
1230  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS |
1231  AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
1232  .priv_data_size = sizeof(PNGEncContext),
1233  .init = png_enc_init,
1234  .close = png_enc_close,
1235  FF_CODEC_ENCODE_CB(encode_png),
1236  .p.pix_fmts = (const enum AVPixelFormat[]) {
1237  AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
1238  AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
1239  AV_PIX_FMT_PAL8,
1240  AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
1241  AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
1242  AV_PIX_FMT_MONOBLACK, AV_PIX_FMT_NONE
1243  },
1244  .p.priv_class = &pngenc_class,
1245  .caps_internal = FF_CODEC_CAP_ICC_PROFILES,
1246 };
1247 
1248 const FFCodec ff_apng_encoder = {
1249  .p.name = "apng",
1250  CODEC_LONG_NAME("APNG (Animated Portable Network Graphics) image"),
1251  .p.type = AVMEDIA_TYPE_VIDEO,
1252  .p.id = AV_CODEC_ID_APNG,
1253  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY |
1254  AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
1255  .priv_data_size = sizeof(PNGEncContext),
1256  .init = png_enc_init,
1257  .close = png_enc_close,
1258  FF_CODEC_ENCODE_CB(encode_apng),
1259  .p.pix_fmts = (const enum AVPixelFormat[]) {
1260  AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
1261  AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
1262  AV_PIX_FMT_PAL8,
1263  AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
1264  AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
1265  AV_PIX_FMT_NONE
1266  },
1267  .p.priv_class = &pngenc_class,
1268  .caps_internal = FF_CODEC_CAP_ICC_PROFILES,
1269 };
AVFrame::color_trc
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:672
ff_encode_reordered_opaque
int ff_encode_reordered_opaque(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *frame)
Propagate user opaque values from the frame to avctx/pkt as needed.
Definition: encode.c:235
AVMasteringDisplayMetadata::has_primaries
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
Definition: mastering_display_metadata.h:62
encode_frame
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:509
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:215
AVFrame::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:668
AVMasteringDisplayMetadata::max_luminance
AVRational max_luminance
Max luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:57
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
entry
#define entry
Definition: aom_film_grain_template.c:66
av_clip
#define av_clip
Definition: common.h:100
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
PNGEncContext::buf
uint8_t buf[IOBUF_SIZE]
Definition: pngenc.c:64
AV_WL32
#define AV_WL32(p, v)
Definition: intreadwrite.h:422
AVColorTransferCharacteristic
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:611
libm.h
ff_png_encoder
const FFCodec ff_png_encoder
Definition: pngenc.c:1225
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:304
av_frame_get_side_data
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:963
AVColorPrimariesDesc
Struct that contains both white point location and primaries location, providing the complete descrip...
Definition: csp.h:78
AVCRC
uint32_t AVCRC
Definition: crc.h:46
png_get_chrm
static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
Definition: pngenc.c:302
AV_PKT_DATA_NEW_EXTRADATA
@ AV_PKT_DATA_NEW_EXTRADATA
The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was...
Definition: packet.h:56
APNG_FCTL_CHUNK_SIZE
#define APNG_FCTL_CHUNK_SIZE
Definition: apng.h:42
ff_png_get_nb_channels
int ff_png_get_nb_channels(int color_type)
Definition: png.c:41
PNGEncContext::bits_per_pixel
int bits_per_pixel
Definition: pngenc.c:71
AVMasteringDisplayMetadata::display_primaries
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
Definition: mastering_display_metadata.h:42
src1
const pixel * src1
Definition: h264pred_template.c:421
AVMasteringDisplayMetadata::has_luminance
int has_luminance
Flag indicating whether the luminance (min_ and max_) have been set.
Definition: mastering_display_metadata.h:67
rational.h
PNGEncContext::last_frame
AVFrame * last_frame
Definition: pngenc.c:81
int64_t
long long int64_t
Definition: coverity.c:34
output
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
Definition: filter_design.txt:225
AVFrame::color_primaries
enum AVColorPrimaries color_primaries
Definition: frame.h:670
mask
int mask
Definition: mediacodecdec_common.c:154
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:162
apng_encode_frame
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict, APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
Definition: pngenc.c:820
AVContentLightMetadata::MaxCLL
unsigned MaxCLL
Max content light level (cd/m^2).
Definition: mastering_display_metadata.h:111
APNGFctlChunk::delay_num
uint16_t delay_num
Definition: pngenc.c:49
test::height
int height
Definition: vc1dsp.c:40
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:389
AV_PIX_FMT_RGBA64BE
@ AV_PIX_FMT_RGBA64BE
packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is st...
Definition: pixfmt.h:202
AVFrame::width
int width
Definition: frame.h:461
PNG_FILTER_VALUE_MIXED
#define PNG_FILTER_VALUE_MIXED
Definition: png.h:45
w
uint8_t w
Definition: llviddspenc.c:38
AVPacket::data
uint8_t * data
Definition: packet.h:539
AVOption
AVOption.
Definition: opt.h:429
encode.h
b
#define b
Definition: input.c:41
AVCOL_TRC_UNSPECIFIED
@ AVCOL_TRC_UNSPECIFIED
Definition: pixfmt.h:614
data
const char data[16]
Definition: mxf.c:149
png_write_row
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
Definition: pngenc.c:277
FFCodec
Definition: codec_internal.h:127
output_data
static int output_data(MLPDecodeContext *m, unsigned int substr, AVFrame *frame, int *got_frame_ptr)
Write the audio data into the output buffer.
Definition: mlpdec.c:1108
PNGEncContext::dpm
int dpm
Physical pixel density, in dots per meter, if set.
Definition: pngenc.c:66
AVPacket::duration
int64_t duration
Duration of this packet in AVStream->time_base units, 0 if unknown.
Definition: packet.h:557
png_get_gama
static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
Definition: pngenc.c:320
PNGEncContext::last_frame_packet
uint8_t * last_frame_packet
Definition: pngenc.c:83
AVColorPrimaries
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:586
ff_deflate_end
void ff_deflate_end(FFZStream *zstream)
Wrapper around deflateEnd().
AV_CODEC_ID_APNG
@ AV_CODEC_ID_APNG
Definition: codec_id.h:269
FF_COMPRESSION_DEFAULT
#define FF_COMPRESSION_DEFAULT
Definition: avcodec.h:1256
APNG_DISPOSE_OP_BACKGROUND
@ APNG_DISPOSE_OP_BACKGROUND
Definition: apng.h:32
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:594
FF_INPUT_BUFFER_MIN_SIZE
#define FF_INPUT_BUFFER_MIN_SIZE
Used by some encoders as upper bound for the length of headers.
Definition: encode.h:33
AV_WB64
#define AV_WB64(p, v)
Definition: intreadwrite.h:429
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:410
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:30
NB_PASSES
#define NB_PASSES
Definition: png.h:47
AVContentLightMetadata
Content light level needed by to transmit HDR over HDMI (CTA-861.3).
Definition: mastering_display_metadata.h:107
crc.h
ff_apng_encoder
const FFCodec ff_apng_encoder
Definition: pngenc.c:1248
sub_png_paeth_prediction
static void sub_png_paeth_prediction(uint8_t *dst, const uint8_t *src, const uint8_t *top, int w, int bpp)
Definition: pngenc.c:126
AV_PIX_FMT_GRAY16BE
@ AV_PIX_FMT_GRAY16BE
Y , 16bpp, big-endian.
Definition: pixfmt.h:104
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:64
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:131
PNGEncContext::prev_frame
AVFrame * prev_frame
Definition: pngenc.c:80
AVCOL_TRC_IEC61966_2_1
@ AVCOL_TRC_IEC61966_2_1
IEC 61966-2-1 (sRGB or sYCC)
Definition: pixfmt.h:625
ff_png_pass_row_size
int ff_png_pass_row_size(int pass, int bits_per_pixel, int width)
Definition: png.c:54
fail
#define fail()
Definition: checkasm.h:189
AV_STEREO3D_2D
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:52
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:508
APNGFctlChunk::blend_op
uint8_t blend_op
Definition: pngenc.c:50
FF_CODEC_ENCODE_CB
#define FF_CODEC_ENCODE_CB(func)
Definition: codec_internal.h:320
AVRational::num
int num
Numerator.
Definition: rational.h:59
encode_png
static int encode_png(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:631
PNG_COLOR_TYPE_RGB_ALPHA
#define PNG_COLOR_TYPE_RGB_ALPHA
Definition: png.h:36
AV_CODEC_FLAG_INTERLACED_DCT
#define AV_CODEC_FLAG_INTERLACED_DCT
Use interlaced DCT.
Definition: avcodec.h:330
png_filter_row
static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type, const uint8_t *src, const uint8_t *top, int size, int bpp)
Definition: pngenc.c:170
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:150
avassert.h
pkt
AVPacket * pkt
Definition: movenc.c:60
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
zlib_wrapper.h
AVFrameSideData::size
size_t size
Definition: frame.h:268
av_cold
#define av_cold
Definition: attributes.h:90
encode_apng
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:949
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:62
PNGEncContext::bytestream_end
uint8_t * bytestream_end
Definition: pngenc.c:59
stereo3d.h
AVMasteringDisplayMetadata::white_point
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
Definition: mastering_display_metadata.h:47
s
#define s(width, name)
Definition: cbs_vp9.c:198
av_csp_primaries_desc_from_id
const AVColorPrimariesDesc * av_csp_primaries_desc_from_id(enum AVColorPrimaries prm)
Retrieves a complete gamut description from an enum constant describing the color primaries.
Definition: csp.c:90
png_write_chunk
static void png_write_chunk(uint8_t **f, uint32_t tag, const uint8_t *buf, int length)
Definition: pngenc.c:229
APNG_BLEND_OP_SOURCE
@ APNG_BLEND_OP_SOURCE
Definition: apng.h:37
PNG_COLOR_TYPE_RGB
#define PNG_COLOR_TYPE_RGB
Definition: png.h:35
AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE
#define AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE
This encoder can reorder user opaque values from input AVFrames and return them with corresponding ou...
Definition: codec.h:159
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
AVCodecContext::bits_per_raw_sample
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
Definition: avcodec.h:1585
PNG_Q2D
#define PNG_Q2D(q, divisor)
Definition: pngenc.c:300
png_write_image_data
static void png_write_image_data(AVCodecContext *avctx, const uint8_t *buf, int length)
Definition: pngenc.c:249
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:296
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:100
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:461
AVStereo3D::flags
int flags
Additional information about the frame packing.
Definition: stereo3d.h:212
AV_CODEC_ID_PNG
@ AV_CODEC_ID_PNG
Definition: codec_id.h:113
if
if(ret)
Definition: filter_design.txt:179
PNGEncContext
Definition: pngenc.c:53
APNGFctlChunk::y_offset
uint32_t y_offset
Definition: pngenc.c:48
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:110
AV_PIX_FMT_GRAY8A
@ AV_PIX_FMT_GRAY8A
alias for AV_PIX_FMT_YA8
Definition: pixfmt.h:143
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
APNGFctlChunk::delay_den
uint16_t delay_den
Definition: pngenc.c:49
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:75
NULL
#define NULL
Definition: coverity.c:32
apng.h
AV_WB16
#define AV_WB16(p, v)
Definition: intreadwrite.h:401
IOBUF_SIZE
#define IOBUF_SIZE
Definition: pngenc.c:43
AV_PIX_FMT_MONOBLACK
@ AV_PIX_FMT_MONOBLACK
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb.
Definition: pixfmt.h:83
AVCOL_PRI_BT709
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
Definition: pixfmt.h:588
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:237
apng_do_inverse_blend
static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input, APNGFctlChunk *fctl_chunk, uint8_t bpp)
Definition: pngenc.c:677
APNGFctlChunk::width
uint32_t width
Definition: pngenc.c:47
png_enc_close
static av_cold int png_enc_close(AVCodecContext *avctx)
Definition: pngenc.c:1190
AV_FRAME_DATA_ICC_PROFILE
@ AV_FRAME_DATA_ICC_PROFILE
The data contains an ICC profile as an opaque octet buffer following the format described by ISO 1507...
Definition: frame.h:144
APNG_DISPOSE_OP_PREVIOUS
@ APNG_DISPOSE_OP_PREVIOUS
Definition: apng.h:33
PNG_COLOR_TYPE_GRAY
#define PNG_COLOR_TYPE_GRAY
Definition: png.h:33
options
Definition: swscale.c:42
deflate
static void deflate(uint8_t *dst, const uint8_t *p1, int width, int threshold, const uint8_t *coordinates[], int coord, int maxc)
Definition: vf_neighbor.c:161
PNGEncContext::filter_type
int filter_type
Definition: pngenc.c:61
AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
@ AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
Mastering display metadata associated with a video frame.
Definition: frame.h:120
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
PNGEncContext::extra_data_updated
int extra_data_updated
Definition: pngenc.c:76
APNGFctlChunk
Definition: pngenc.c:45
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
ff_png_pass_ymask
const uint8_t ff_png_pass_ymask[NB_PASSES]
Definition: png.c:27
ff_llvidencdsp_init
av_cold void ff_llvidencdsp_init(LLVidEncDSPContext *c)
Definition: lossless_videoencdsp.c:100
add_icc_profile_size
static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict, uint64_t *max_packet_size)
Definition: pngenc.c:603
APNGFctlChunk::sequence_number
uint32_t sequence_number
Definition: pngenc.c:46
AV_WB32
#define AV_WB32(p, v)
Definition: intreadwrite.h:415
PNGEncContext::zstream
FFZStream zstream
Definition: pngenc.c:63
test::width
int width
Definition: vc1dsp.c:39
PNG_FILTER_VALUE_NONE
#define PNG_FILTER_VALUE_NONE
Definition: png.h:40
f
f
Definition: af_crystalizer.c:122
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:368
AV_PIX_FMT_RGB24
@ AV_PIX_FMT_RGB24
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:75
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AVPacket::size
int size
Definition: packet.h:540
codec_internal.h
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:83
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:1015
av_bswap32
#define av_bswap32
Definition: bswap.h:47
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
AV_PIX_FMT_YA16BE
@ AV_PIX_FMT_YA16BE
16 bits gray, 16 bits alpha (big-endian)
Definition: pixfmt.h:209
PNGEncContext::last_frame_packet_size
size_t last_frame_packet_size
Definition: pngenc.c:84
PNG_FILTER_VALUE_AVG
#define PNG_FILTER_VALUE_AVG
Definition: png.h:43
size
int size
Definition: twinvq_data.h:10344
av_csp_approximate_trc_gamma
double av_csp_approximate_trc_gamma(enum AVColorTransferCharacteristic trc)
Determine a suitable 'gamma' value to match the supplied AVColorTransferCharacteristic.
Definition: csp.c:149
MKBETAG
#define MKBETAG(a, b, c, d)
Definition: macros.h:56
PNGEncContext::llvidencdsp
LLVidEncDSPContext llvidencdsp
Definition: pngenc.c:55
APNG_DISPOSE_OP_NONE
@ APNG_DISPOSE_OP_NONE
Definition: apng.h:31
AVFrameSideData::data
uint8_t * data
Definition: frame.h:267
PNG_FILTER_VALUE_PAETH
#define PNG_FILTER_VALUE_PAETH
Definition: png.h:44
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:476
PNGEncContext::extra_data
uint8_t * extra_data
Definition: pngenc.c:77
png_choose_filter
static uint8_t * png_choose_filter(PNGEncContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *top, int size, int bpp)
Definition: pngenc.c:199
PNG_FILTER_VALUE_UP
#define PNG_FILTER_VALUE_UP
Definition: png.h:42
a
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:41
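A minimal sketch of the idea described above, assuming only standard C; the SUINT_like typedef is a stand-in for FFmpeg's own SUINT, not its real definition. The arithmetic is performed in an unsigned type, where wraparound is well defined, while the value keeps its signed meaning:

typedef unsigned SUINT_like;   /* stand-in; FFmpeg defines SUINT internally */

static int scaled_sum(int a, int b)
{
    /* (a + b) * 3 could overflow as signed arithmetic (undefined behavior);
     * doing it in an unsigned type merely wraps, which is well defined. */
    SUINT_like s = (SUINT_like)a + (SUINT_like)b;
    return (int)(s * 3u);      /* conversion back to int is implementation-defined, not UB */
}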
csp.h
av_crc_get_table
const AVCRC * av_crc_get_table(AVCRCId crc_id)
Get an initialized standard CRC table.
Definition: crc.c:374
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
OFFSET
#define OFFSET(x)
Definition: pngenc.c:1203
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:545
AV_STEREO3D_FLAG_INVERT
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:194
input
... test the status of outputs and forward it to the corresponding inputs, return FFERROR_NOT_READY. If the filter stores internally one or a few frames for some input ...
Definition: filter_design.txt:172
PNGSIG
#define PNGSIG
Definition: png.h:49
lossless_videoencdsp.h
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1578
PNG_FILTER_VALUE_SUB
#define PNG_FILTER_VALUE_SUB
Definition: png.h:41
AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
@ AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
Content light level (based on CTA-861.3).
Definition: frame.h:137
AV_PIX_FMT_RGB48BE
@ AV_PIX_FMT_RGB48BE
packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:109
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:532
options
static const AVOption options[]
Definition: pngenc.c:1205
src2
const pixel * src2
Definition: h264pred_template.c:422
AV_FRAME_DATA_STEREO3D
@ AV_FRAME_DATA_STEREO3D
Stereoscopic 3d metadata.
Definition: frame.h:64
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:194
AVMasteringDisplayMetadata
Mastering display metadata capable of representing the color volume of the display used to master the...
Definition: mastering_display_metadata.h:38
len
int len
Definition: vorbis_enc_data.h:426
AVCodecContext::height
int height
Definition: avcodec.h:624
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:663
LLVidEncDSPContext
Definition: lossless_videoencdsp.h:25
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:700
FF_CODEC_CAP_ICC_PROFILES
#define FF_CODEC_CAP_ICC_PROFILES
Codec supports embedded ICC profiles (AV_FRAME_DATA_ICC_PROFILE).
Definition: codec_internal.h:82
sub_left_prediction
static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
Definition: pngenc.c:154
PNGEncContext::color_type
int color_type
Definition: pngenc.c:70
avcodec.h
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:84
AVCodecContext::frame_num
int64_t frame_num
Frame counter, set by libavcodec.
Definition: avcodec.h:2041
bound
static double bound(const double threshold, const double val)
Definition: af_dynaudnorm.c:413
tag
uint32_t tag
Definition: movenc.c:1879
ret
ret
Definition: filter_design.txt:187
pred
static const float pred[4]
Definition: siprdata.h:259
PNGEncContext::extra_data_size
int extra_data_size
Definition: pngenc.c:78
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:80
AVStereo3D::type
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:207
PNGEncContext::bit_depth
int bit_depth
Definition: pngenc.c:69
PNG_LRINT
#define PNG_LRINT(d, divisor)
Definition: pngenc.c:299
PNGEncContext::bytestream_start
uint8_t * bytestream_start
Definition: pngenc.c:58
U
#define U(x)
Definition: vpx_arith.h:37
av_frame_replace
int av_frame_replace(AVFrame *dst, const AVFrame *src)
Ensure the destination frame refers to the same data described by the source frame,...
Definition: frame.c:499
AVCodecContext
main external API structure.
Definition: avcodec.h:451
AVFrame::height
int height
Definition: frame.h:461
av_packet_new_side_data
uint8_t * av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type, size_t size)
Allocate new side data for a packet.
Definition: packet.c:231
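A hedged sketch of attaching side data to an output packet; the side-data type, helper name and payload here are placeholders, not something this encoder necessarily emits:

#include <errno.h>
#include <string.h>
#include "libavcodec/packet.h"
#include "libavutil/error.h"

static int attach_blob(AVPacket *pkt, const uint8_t *blob, size_t blob_size)
{
    /* The returned buffer is owned by the packet and freed together with it. */
    uint8_t *sd = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, blob_size);
    if (!sd)
        return AVERROR(ENOMEM);
    memcpy(sd, blob, blob_size);
    return 0;
}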
ff_get_encode_buffer
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
Definition: encode.c:106
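A hedged sketch of the usual pattern around this internal helper inside an encode callback: allocate the packet at the required size, then fill pkt->data; the wrapper function and its parameters are illustrative:

#include <string.h>
#include "avcodec.h"
#include "encode.h"

/* Sketch: allocate the output packet, then copy an already-serialized
 * payload into it. 'payload'/'payload_size' are illustrative parameters. */
static int emit_packet(AVCodecContext *avctx, AVPacket *pkt,
                       const uint8_t *payload, int64_t payload_size)
{
    int ret = ff_get_encode_buffer(avctx, pkt, payload_size, 0);
    if (ret < 0)
        return ret;
    memcpy(pkt->data, payload, payload_size);
    return 0;
}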
av_crc
uint32_t av_crc(const AVCRC *ctx, uint32_t crc, const uint8_t *buffer, size_t length)
Calculate the CRC of a block.
Definition: crc.c:392
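A hedged sketch of computing a standard CRC-32 over a chunk's type and payload, the way a PNG writer needs it (start from all-ones, invert at the end); the function and buffer names are placeholders:

#include <stddef.h>
#include <stdint.h>
#include "libavutil/crc.h"

static uint32_t chunk_crc(const uint8_t *tag4, const uint8_t *data, size_t len)
{
    const AVCRC *table = av_crc_get_table(AV_CRC_32_IEEE_LE);
    uint32_t crc = ~0U;                     /* standard CRC-32 initial value */
    crc = av_crc(table, crc, tag4, 4);      /* chunk type bytes */
    crc = av_crc(table, crc, data, len);    /* chunk payload */
    return ~crc;                            /* final inversion */
}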
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
png_get_interlaced_row
static void png_get_interlaced_row(uint8_t *dst, int row_size, int bits_per_pixel, int pass, const uint8_t *src, int width)
Definition: pngenc.c:87
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:76
PNG_COLOR_MASK_PALETTE
#define PNG_COLOR_MASK_PALETTE
Definition: png.h:29
AVMasteringDisplayMetadata::min_luminance
AVRational min_luminance
Min luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:52
AV_WB32_PNG_D
#define AV_WB32_PNG_D(buf, q)
Definition: pngenc.c:301
AV_CRC_32_IEEE_LE
@ AV_CRC_32_IEEE_LE
Definition: crc.h:53
PNGEncContext::last_frame_fctl
APNGFctlChunk last_frame_fctl
Definition: pngenc.c:82
desc
const char * desc
Definition: libsvtav1.c:79
PNGEncContext::dpi
int dpi
Physical pixel density, in dots per inch, if set.
Definition: pngenc.c:65
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
FFZStream
Definition: zlib_wrapper.h:27
mem.h
mastering_display_metadata.h
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:265
png_enc_init
static av_cold int png_enc_init(AVCodecContext *avctx)
Definition: pngenc.c:1103
AVDictionaryEntry
Definition: dict.h:89
png_write_iccp
static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
Definition: pngenc.c:330
alpha
static const int16_t alpha[]
Definition: ilbcdata.h:55
AVPacket
This structure stores compressed data.
Definition: packet.h:516
AVContentLightMetadata::MaxFALL
unsigned MaxFALL
Max average light level per frame (cd/m^2).
Definition: mastering_display_metadata.h:116
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:478
png.h
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:624
bytestream.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:434
PNG_COLOR_TYPE_GRAY_ALPHA
#define PNG_COLOR_TYPE_GRAY_ALPHA
Definition: png.h:37
AVFrameSideData::metadata
AVDictionary * metadata
Definition: frame.h:269
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
APNGFctlChunk::height
uint32_t height
Definition: pngenc.c:47
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
MKTAG
#define MKTAG(a, b, c, d)
Definition: macros.h:55
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:203
width
#define width
Definition: dsp.h:85
input_data
static void input_data(MLPEncodeContext *ctx, MLPSubstream *s, uint8_t **const samples, int nb_samples)
Wrapper function for inputting data in two different bit-depths.
Definition: mlpenc.c:1224
PNGEncContext::bytestream
uint8_t * bytestream
Definition: pngenc.c:57
PNGEncContext::is_progressive
int is_progressive
Definition: pngenc.c:68
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
VE
#define VE
Definition: pngenc.c:1204
ff_alloc_packet
int ff_alloc_packet(AVCodecContext *avctx, AVPacket *avpkt, int64_t size)
Check AVPacket size and allocate data.
Definition: encode.c:62
encode_headers
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:372
APNGFctlChunk::dispose_op
uint8_t dispose_op
Definition: pngenc.c:50
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown). That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:648
PNGEncContext::palette_checksum
uint32_t palette_checksum
Definition: pngenc.c:74
PNG_COLOR_TYPE_PALETTE
#define PNG_COLOR_TYPE_PALETTE
Definition: png.h:34
src
#define src
Definition: vp8dsp.c:248
APNGFctlChunk::x_offset
uint32_t x_offset
Definition: pngenc.c:48
ff_deflate_init
int ff_deflate_init(FFZStream *zstream, int level, void *logctx)
Wrapper around deflateInit().
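A hedged sketch of the init/teardown pattern for this internal wrapper, assuming the matching ff_deflate_end() from zlib_wrapper.h; the compression level follows zlib's 0-9 scale and the helper name is illustrative:

#include <zlib.h>
#include "zlib_wrapper.h"

/* Sketch: set up the wrapped deflate stream once (e.g. in the encoder's
 * init callback); release it later with ff_deflate_end(). */
static int setup_deflate(FFZStream *zstream, void *logctx)
{
    int ret = ff_deflate_init(zstream, Z_DEFAULT_COMPRESSION, logctx);
    if (ret < 0)
        return ret;
    /* ... run deflate() on the wrapped z_stream as rows are filtered ... */
    return 0;
}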
PNGEncContext::sequence_number
uint32_t sequence_number
Definition: pngenc.c:75
AVCodecContext::compression_level
int compression_level
Definition: avcodec.h:1255
pngenc_class
static const AVClass pngenc_class
Definition: pngenc.c:1218