FFmpeg
pngenc.c
1 /*
2  * PNG image format
3  * Copyright (c) 2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include "avcodec.h"
23 #include "codec_internal.h"
24 #include "encode.h"
25 #include "bytestream.h"
26 #include "lossless_videoencdsp.h"
27 #include "png.h"
28 #include "apng.h"
29 #include "zlib_wrapper.h"
30 
31 #include "libavutil/avassert.h"
32 #include "libavutil/crc.h"
33 #include "libavutil/csp.h"
34 #include "libavutil/libm.h"
35 #include "libavutil/opt.h"
36 #include "libavutil/rational.h"
37 #include "libavutil/stereo3d.h"
38 
39 #include <zlib.h>
40 
41 #define IOBUF_SIZE 4096
42 
43 typedef struct APNGFctlChunk {
44  uint32_t sequence_number;
45  uint32_t width, height;
46  uint32_t x_offset, y_offset;
47  uint16_t delay_num, delay_den;
48  uint8_t dispose_op, blend_op;
49 } APNGFctlChunk;
50 
51 typedef struct PNGEncContext {
52  AVClass *class;
53  LLVidEncDSPContext llvidencdsp;
54 
55  uint8_t *bytestream;
56  uint8_t *bytestream_start;
57  uint8_t *bytestream_end;
58 
59  int filter_type;
60 
61  FFZStream zstream;
62  uint8_t buf[IOBUF_SIZE];
63  int dpi; ///< Physical pixel density, in dots per inch, if set
64  int dpm; ///< Physical pixel density, in dots per meter, if set
65 
66  int is_progressive;
67  int bit_depth;
68  int color_type;
69  int bits_per_pixel;
70 
71  // APNG
72  uint32_t palette_checksum; // Used to ensure a single unique palette
73  uint32_t sequence_number;
74  int extra_data_updated;
75  uint8_t *extra_data;
76  int extra_data_size;
77 
78  AVFrame *prev_frame;
79  AVFrame *last_frame;
80  APNGFctlChunk last_frame_fctl;
81  uint8_t *last_frame_packet;
82  size_t last_frame_packet_size;
83 } PNGEncContext;
84 
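/*
 * Adam7 interlacing helper: each of the 7 passes selects a subset of pixels
 * from every 8x8 block. masks[pass] below is the column mask for one 8-pixel
 * span (ff_png_pass_ymask holds the matching row masks), so testing
 * (mask << (x & 7)) & 0x80 decides whether column x belongs to the pass.
 */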
85 static void png_get_interlaced_row(uint8_t *dst, int row_size,
86  int bits_per_pixel, int pass,
87  const uint8_t *src, int width)
88 {
89  int x, mask, dst_x, j, b, bpp;
90  uint8_t *d;
91  const uint8_t *s;
92  static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};
93 
94  mask = masks[pass];
95  switch (bits_per_pixel) {
96  case 1:
97  memset(dst, 0, row_size);
98  dst_x = 0;
99  for (x = 0; x < width; x++) {
100  j = (x & 7);
101  if ((mask << j) & 0x80) {
102  b = (src[x >> 3] >> (7 - j)) & 1;
103  dst[dst_x >> 3] |= b << (7 - (dst_x & 7));
104  dst_x++;
105  }
106  }
107  break;
108  default:
109  bpp = bits_per_pixel >> 3;
110  d = dst;
111  s = src;
112  for (x = 0; x < width; x++) {
113  j = x & 7;
114  if ((mask << j) & 0x80) {
115  memcpy(d, s, bpp);
116  d += bpp;
117  }
118  s += bpp;
119  }
120  break;
121  }
122 }
123 
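/*
 * Paeth filter: the predictor is whichever of a (left), b (up) or c (up-left)
 * lies closest to the estimate p = a + b - c. The code never forms p
 * explicitly; it compares the distances directly, using |p - a| = |b - c|,
 * |p - b| = |a - c| and |p - c| = |a + b - 2c|, and stores the residual
 * src[i] - predictor.
 */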
124 static void sub_png_paeth_prediction(uint8_t *dst, const uint8_t *src, const uint8_t *top,
125  int w, int bpp)
126 {
127  int i;
128  for (i = 0; i < w; i++) {
129  int a, b, c, p, pa, pb, pc;
130 
131  a = src[i - bpp];
132  b = top[i];
133  c = top[i - bpp];
134 
135  p = b - c;
136  pc = a - c;
137 
138  pa = abs(p);
139  pb = abs(pc);
140  pc = abs(p + pc);
141 
142  if (pa <= pb && pa <= pc)
143  p = a;
144  else if (pb <= pc)
145  p = b;
146  else
147  p = c;
148  dst[i] = src[i] - p;
149  }
150 }
151 
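/*
 * "Sub" filter: each byte minus the byte bpp positions to its left. The first
 * bpp bytes have no left neighbour and are copied verbatim; a short scalar
 * loop then handles at most 32 - bpp bytes so the bulk of the row can go
 * through the (possibly SIMD-accelerated) llvidencdsp diff_bytes() routine.
 */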
152 static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
153 {
154  const uint8_t *src1 = src + bpp;
155  const uint8_t *src2 = src;
156  int x, unaligned_w;
157 
158  memcpy(dst, src, bpp);
159  dst += bpp;
160  size -= bpp;
161  unaligned_w = FFMIN(32 - bpp, size);
162  for (x = 0; x < unaligned_w; x++)
163  *dst++ = *src1++ - *src2++;
164  size -= unaligned_w;
165  c->llvidencdsp.diff_bytes(dst, src1, src2, size);
166 }
167 
168 static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
169  const uint8_t *src, const uint8_t *top, int size, int bpp)
170 {
171  int i;
172 
173  switch (filter_type) {
174  case PNG_FILTER_VALUE_NONE:
175  memcpy(dst, src, size);
176  break;
177  case PNG_FILTER_VALUE_SUB:
178  sub_left_prediction(c, dst, src, bpp, size);
179  break;
180  case PNG_FILTER_VALUE_UP:
181  c->llvidencdsp.diff_bytes(dst, src, top, size);
182  break;
183  case PNG_FILTER_VALUE_AVG:
184  for (i = 0; i < bpp; i++)
185  dst[i] = src[i] - (top[i] >> 1);
186  for (; i < size; i++)
187  dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
188  break;
189  case PNG_FILTER_VALUE_PAETH:
190  for (i = 0; i < bpp; i++)
191  dst[i] = src[i] - top[i];
192  sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
193  break;
194  }
195 }
196 
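/*
 * Select the filter byte for one row. With the "mixed" prediction mode all
 * five PNG filters are tried and the candidate with the smallest sum of
 * absolute filtered values (the heuristic suggested by the PNG specification)
 * wins; buf1/buf2 are swapped so the best row so far survives the next try.
 * The returned buffer starts with the filter-type byte, then the filtered row.
 */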
197 static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
198  const uint8_t *src, const uint8_t *top, int size, int bpp)
199 {
200  int pred = s->filter_type;
201  av_assert0(bpp || !pred);
202  if (!top && pred)
203  pred = PNG_FILTER_VALUE_NONE;
204  if (pred == PNG_FILTER_VALUE_MIXED) {
205  int i;
206  int cost, bcost = INT_MAX;
207  uint8_t *buf1 = dst, *buf2 = dst + size + 16;
208  for (pred = 0; pred < 5; pred++) {
209  png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
210  buf1[0] = pred;
211  cost = 0;
212  for (i = 0; i <= size; i++)
213  cost += abs((int8_t) buf1[i]);
214  if (cost < bcost) {
215  bcost = cost;
216  FFSWAP(uint8_t *, buf1, buf2);
217  }
218  }
219  return buf2;
220  } else {
221  png_filter_row(s, dst + 1, pred, src, top, size, bpp);
222  dst[0] = pred;
223  return dst;
224  }
225 }
226 
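/*
 * Emit one PNG chunk: 4-byte big-endian length (payload only), 4-byte type,
 * payload, then a CRC-32 over type + payload. MKTAG constants are
 * little-endian in memory, hence the temporary buffer used for the CRC and
 * the av_bswap32() when the type field is written.
 */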
227 static void png_write_chunk(uint8_t **f, uint32_t tag,
228  const uint8_t *buf, int length)
229 {
230  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
231  uint32_t crc = ~0U;
232  uint8_t tagbuf[4];
233 
234  bytestream_put_be32(f, length);
235  AV_WL32(tagbuf, tag);
236  crc = av_crc(crc_table, crc, tagbuf, 4);
237  bytestream_put_be32(f, av_bswap32(tag));
238  if (length > 0) {
239  crc = av_crc(crc_table, crc, buf, length);
240  if (*f != buf)
241  memcpy(*f, buf, length);
242  *f += length;
243  }
244  bytestream_put_be32(f, ~crc);
245 }
246 
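/*
 * Image data is written as IDAT for PNG and for the first APNG frame; later
 * APNG frames use fdAT, which is an IDAT with a 4-byte sequence number
 * prepended (hence length + 4, and the CRC computed by hand over the 8 bytes
 * preceding the payload).
 */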
247 static void png_write_image_data(AVCodecContext *avctx,
248  const uint8_t *buf, int length)
249 {
250  PNGEncContext *s = avctx->priv_data;
251  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
252  uint32_t crc = ~0U;
253 
254  if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_num == 0) {
255  png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
256  return;
257  }
258 
259  bytestream_put_be32(&s->bytestream, length + 4);
260 
261  bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
262  bytestream_put_be32(&s->bytestream, s->sequence_number);
263  crc = av_crc(crc_table, crc, s->bytestream - 8, 8);
264 
265  crc = av_crc(crc_table, crc, buf, length);
266  memcpy(s->bytestream, buf, length);
267  s->bytestream += length;
268 
269  bytestream_put_be32(&s->bytestream, ~crc);
270 
271  ++s->sequence_number;
272 }
273 
274 /* XXX: do filtering */
275 static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
276 {
277  PNGEncContext *s = avctx->priv_data;
278  z_stream *const zstream = &s->zstream.zstream;
279  int ret;
280 
281  zstream->avail_in = size;
282  zstream->next_in = data;
283  while (zstream->avail_in > 0) {
284  ret = deflate(zstream, Z_NO_FLUSH);
285  if (ret != Z_OK)
286  return -1;
287  if (zstream->avail_out == 0) {
288  if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
289  png_write_image_data(avctx, s->buf, IOBUF_SIZE);
290  zstream->avail_out = IOBUF_SIZE;
291  zstream->next_out = s->buf;
292  }
293  }
294  return 0;
295 }
296 
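/*
 * cHRM and gAMA store their values as 32-bit big-endian integers scaled by
 * 100000 (e.g. a display gamma of 1/2.2 is written as 45455); these helpers
 * perform that conversion.
 */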
297 #define AV_WB32_PNG(buf, n) AV_WB32(buf, lrint((n) * 100000))
298 #define AV_WB32_PNG_D(buf, d) AV_WB32_PNG(buf, av_q2d(d))
299 static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
300 {
301  const AVColorPrimariesDesc *desc = av_csp_primaries_desc_from_id(prim);
302  if (!desc)
303  return 0;
304 
305  AV_WB32_PNG_D(buf, desc->wp.x);
306  AV_WB32_PNG_D(buf + 4, desc->wp.y);
307  AV_WB32_PNG_D(buf + 8, desc->prim.r.x);
308  AV_WB32_PNG_D(buf + 12, desc->prim.r.y);
309  AV_WB32_PNG_D(buf + 16, desc->prim.g.x);
310  AV_WB32_PNG_D(buf + 20, desc->prim.g.y);
311  AV_WB32_PNG_D(buf + 24, desc->prim.b.x);
312  AV_WB32_PNG_D(buf + 28, desc->prim.b.y);
313 
314  return 1;
315 }
316 
317 static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
318 {
319  double gamma = av_csp_approximate_trc_gamma(trc);
320  if (gamma <= 1e-6)
321  return 0;
322 
323  AV_WB32_PNG(buf, 1.0 / gamma);
324  return 1;
325 }
326 
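/*
 * iCCP payload layout: profile name (1-79 bytes, NUL-terminated), one byte
 * for the compression method (0 = zlib/deflate), then the deflate-compressed
 * ICC profile. The 8 bytes reserved up front leave room for the chunk length
 * and type, which png_write_chunk() fills in (along with the trailing CRC)
 * once the payload size is known.
 */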
327 static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
328 {
329  z_stream *const zstream = &s->zstream.zstream;
330  const AVDictionaryEntry *entry;
331  const char *name;
332  uint8_t *start, *buf;
333  int ret;
334 
335  if (!sd || !sd->size)
336  return 0;
337  zstream->next_in = sd->data;
338  zstream->avail_in = sd->size;
339 
340  /* write the chunk contents first */
341  start = s->bytestream + 8; /* make room for iCCP tag + length */
342  buf = start;
343 
344  /* profile description */
345  entry = av_dict_get(sd->metadata, "name", NULL, 0);
346  name = (entry && entry->value[0]) ? entry->value : "icc";
347  for (int i = 0;; i++) {
348  char c = (i == 79) ? 0 : name[i];
349  bytestream_put_byte(&buf, c);
350  if (!c)
351  break;
352  }
353 
354  /* compression method and profile data */
355  bytestream_put_byte(&buf, 0);
356  zstream->next_out = buf;
357  zstream->avail_out = s->bytestream_end - buf;
358  ret = deflate(zstream, Z_FINISH);
359  deflateReset(zstream);
360  if (ret != Z_STREAM_END)
361  return AVERROR_EXTERNAL;
362 
363  /* rewind to the start and write the chunk header/crc */
364  png_write_chunk(&s->bytestream, MKTAG('i', 'C', 'C', 'P'), start,
365  zstream->next_out - start);
366  return 0;
367 }
368 
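/*
 * Write everything up to and including the palette: IHDR, pHYs, and the
 * optional sTER, iCCP, sRGB/cICP, cHRM, gAMA and sBIT chunks, followed by
 * PLTE + tRNS for palettized input. The PNG specification requires the
 * colorimetry chunks to precede PLTE, which is why the palette comes last.
 */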
369 static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
370 {
371  AVFrameSideData *side_data;
372  PNGEncContext *s = avctx->priv_data;
373  int ret;
374 
375  /* write png header */
376  AV_WB32(s->buf, avctx->width);
377  AV_WB32(s->buf + 4, avctx->height);
378  s->buf[8] = s->bit_depth;
379  s->buf[9] = s->color_type;
380  s->buf[10] = 0; /* compression type */
381  s->buf[11] = 0; /* filter type */
382  s->buf[12] = s->is_progressive; /* interlace type */
383  png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);
384 
385  /* write physical information */
386  if (s->dpm) {
387  AV_WB32(s->buf, s->dpm);
388  AV_WB32(s->buf + 4, s->dpm);
389  s->buf[8] = 1; /* unit specifier is meter */
390  } else {
391  AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
392  AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
393  s->buf[8] = 0; /* unit specifier is unknown */
394  }
395  png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);
396 
397  /* write stereoscopic information */
398  side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_STEREO3D);
399  if (side_data) {
400  AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
401  switch (stereo3d->type) {
402  case AV_STEREO3D_SIDEBYSIDE:
403  s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
404  png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
405  break;
406  case AV_STEREO3D_2D:
407  break;
408  default:
409  av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
410  break;
411  }
412  }
413 
414  side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
415  if ((ret = png_write_iccp(s, side_data)))
416  return ret;
417 
418  /* write colorspace information */
419  if (pict->color_primaries == AVCOL_PRI_BT709 &&
420  pict->color_trc == AVCOL_TRC_IEC61966_2_1) {
421  s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
422  png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
423  } else if (pict->color_trc != AVCOL_TRC_UNSPECIFIED && !side_data) {
424  /*
425  * Avoid writing cICP if the transfer is unknown. Known primaries
426  * with unknown transfer can be handled by cHRM.
427  *
428  * We also avoid writing cICP if an ICC Profile is present, because
429  * the standard requires that cICP overrides iCCP.
430  *
431  * These values match H.273 so no translation is needed.
432  */
433  s->buf[0] = pict->color_primaries;
434  s->buf[1] = pict->color_trc;
435  s->buf[2] = 0; /* colorspace = RGB */
436  s->buf[3] = pict->color_range == AVCOL_RANGE_MPEG ? 0 : 1;
437  png_write_chunk(&s->bytestream, MKTAG('c', 'I', 'C', 'P'), s->buf, 4);
438  }
439 
440  if (png_get_chrm(pict->color_primaries, s->buf))
441  png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
442  if (png_get_gama(pict->color_trc, s->buf))
443  png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);
444 
445  if (avctx->bits_per_raw_sample > 0 && avctx->bits_per_raw_sample < s->bit_depth) {
446  int len = ff_png_get_nb_channels(s->color_type);
447  memset(s->buf, avctx->bits_per_raw_sample, len);
448  png_write_chunk(&s->bytestream, MKTAG('s', 'B', 'I', 'T'), s->buf, len);
449  }
450 
451  /* put the palette if needed, must be after colorspace information */
452  if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
453  int has_alpha, alpha, i;
454  unsigned int v;
455  uint32_t *palette;
456  uint8_t *ptr, *alpha_ptr;
457 
458  palette = (uint32_t *)pict->data[1];
459  ptr = s->buf;
460  alpha_ptr = s->buf + 256 * 3;
461  has_alpha = 0;
462  for (i = 0; i < 256; i++) {
463  v = palette[i];
464  alpha = v >> 24;
465  if (alpha != 0xff)
466  has_alpha = 1;
467  *alpha_ptr++ = alpha;
468  bytestream_put_be24(&ptr, v);
469  }
470  png_write_chunk(&s->bytestream,
471  MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
472  if (has_alpha) {
473  png_write_chunk(&s->bytestream,
474  MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
475  }
476  }
477 
478  return 0;
479 }
480 
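/*
 * Compress one image: every row is prefixed with its filter-type byte and fed
 * to deflate(); compressed output is drained from s->buf in IOBUF_SIZE
 * slices, each of which becomes one IDAT (or fdAT) chunk.
 */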
481 static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
482 {
483  PNGEncContext *s = avctx->priv_data;
484  z_stream *const zstream = &s->zstream.zstream;
485  const AVFrame *const p = pict;
486  int y, len, ret;
487  int row_size, pass_row_size;
488  uint8_t *crow_buf, *crow;
489  uint8_t *crow_base = NULL;
490  uint8_t *progressive_buf = NULL;
491  uint8_t *top_buf = NULL;
492 
493  row_size = (pict->width * s->bits_per_pixel + 7) >> 3;
494 
495  crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
496  if (!crow_base) {
497  ret = AVERROR(ENOMEM);
498  goto the_end;
499  }
500  // pixel data should be aligned, but there's a control byte before it
501  crow_buf = crow_base + 15;
502  if (s->is_progressive) {
503  progressive_buf = av_malloc(row_size + 1);
504  top_buf = av_malloc(row_size + 1);
505  if (!progressive_buf || !top_buf) {
506  ret = AVERROR(ENOMEM);
507  goto the_end;
508  }
509  }
510 
511  /* put each row */
512  zstream->avail_out = IOBUF_SIZE;
513  zstream->next_out = s->buf;
514  if (s->is_progressive) {
515  int pass;
516 
517  for (pass = 0; pass < NB_PASSES; pass++) {
518  /* NOTE: a pass is completely omitted if no pixels would be
519  * output */
520  pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
521  if (pass_row_size > 0) {
522  uint8_t *top = NULL;
523  for (y = 0; y < pict->height; y++)
524  if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
525  const uint8_t *ptr = p->data[0] + y * p->linesize[0];
526  FFSWAP(uint8_t *, progressive_buf, top_buf);
527  png_get_interlaced_row(progressive_buf, pass_row_size,
528  s->bits_per_pixel, pass,
529  ptr, pict->width);
530  crow = png_choose_filter(s, crow_buf, progressive_buf,
531  top, pass_row_size, s->bits_per_pixel >> 3);
532  png_write_row(avctx, crow, pass_row_size + 1);
533  top = progressive_buf;
534  }
535  }
536  }
537  } else {
538  const uint8_t *top = NULL;
539  for (y = 0; y < pict->height; y++) {
540  const uint8_t *ptr = p->data[0] + y * p->linesize[0];
541  crow = png_choose_filter(s, crow_buf, ptr, top,
542  row_size, s->bits_per_pixel >> 3);
543  png_write_row(avctx, crow, row_size + 1);
544  top = ptr;
545  }
546  }
547  /* compress last bytes */
548  for (;;) {
549  ret = deflate(zstream, Z_FINISH);
550  if (ret == Z_OK || ret == Z_STREAM_END) {
551  len = IOBUF_SIZE - zstream->avail_out;
552  if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
553  png_write_image_data(avctx, s->buf, len);
554  }
555  zstream->avail_out = IOBUF_SIZE;
556  zstream->next_out = s->buf;
557  if (ret == Z_STREAM_END)
558  break;
559  } else {
560  ret = -1;
561  goto the_end;
562  }
563  }
564 
565  ret = 0;
566 
567 the_end:
568  av_freep(&crow_base);
569  av_freep(&progressive_buf);
570  av_freep(&top_buf);
571  deflateReset(zstream);
572  return ret;
573 }
574 
575 static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict,
576  uint64_t *max_packet_size)
577 {
578  PNGEncContext *s = avctx->priv_data;
579  const AVFrameSideData *sd;
580  const int hdr_size = 128;
581  uint64_t new_pkt_size;
582  uLong bound;
583 
584  if (!pict)
585  return 0;
586  sd = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
587  if (!sd || !sd->size)
588  return 0;
589  if (sd->size != (uLong) sd->size)
590  return AVERROR_INVALIDDATA;
591 
592  bound = deflateBound(&s->zstream.zstream, sd->size);
593  if (bound > INT32_MAX - hdr_size)
594  return AVERROR_INVALIDDATA;
595 
596  new_pkt_size = *max_packet_size + bound + hdr_size;
597  if (new_pkt_size < *max_packet_size)
598  return AVERROR_INVALIDDATA;
599  *max_packet_size = new_pkt_size;
600  return 0;
601 }
602 
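/*
 * Worst-case packet size: header space plus, per row, the deflate bound for
 * the row and 12 bytes of chunk overhead (length + type + CRC) for every
 * IOBUF_SIZE slice the compressed data may be split into.
 */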
603 static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
604  const AVFrame *pict, int *got_packet)
605 {
606  PNGEncContext *s = avctx->priv_data;
607  int ret;
608  int enc_row_size;
609  uint64_t max_packet_size;
610 
611  enc_row_size = deflateBound(&s->zstream.zstream,
612  (avctx->width * s->bits_per_pixel + 7) >> 3);
613  max_packet_size =
614  AV_INPUT_BUFFER_MIN_SIZE + // headers
615  avctx->height * (
616  enc_row_size +
617  12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
618  );
619  if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
620  return ret;
621  ret = ff_alloc_packet(avctx, pkt, max_packet_size);
622  if (ret < 0)
623  return ret;
624 
625  s->bytestream_start =
626  s->bytestream = pkt->data;
627  s->bytestream_end = pkt->data + pkt->size;
628 
629  AV_WB64(s->bytestream, PNGSIG);
630  s->bytestream += 8;
631 
632  ret = encode_headers(avctx, pict);
633  if (ret < 0)
634  return ret;
635 
636  ret = encode_frame(avctx, pict);
637  if (ret < 0)
638  return ret;
639 
640  png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);
641 
642  pkt->size = s->bytestream - s->bytestream_start;
643  pkt->flags |= AV_PKT_FLAG_KEY;
644  *got_packet = 1;
645 
646  return 0;
647 }
648 
649 static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input,
650  APNGFctlChunk *fctl_chunk, uint8_t bpp)
651 {
652  // output: background, input: foreground
653  // produce an output image such that, when blended with the background, it reproduces the foreground
654 
655  unsigned int x, y;
656  unsigned int leftmost_x = input->width;
657  unsigned int rightmost_x = 0;
658  unsigned int topmost_y = input->height;
659  unsigned int bottommost_y = 0;
660  const uint8_t *input_data = input->data[0];
661  uint8_t *output_data = output->data[0];
662  ptrdiff_t input_linesize = input->linesize[0];
663  ptrdiff_t output_linesize = output->linesize[0];
664 
665  // Find bounding box of changes
666  for (y = 0; y < input->height; ++y) {
667  for (x = 0; x < input->width; ++x) {
668  if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
669  continue;
670 
671  if (x < leftmost_x)
672  leftmost_x = x;
673  if (x >= rightmost_x)
674  rightmost_x = x + 1;
675  if (y < topmost_y)
676  topmost_y = y;
677  if (y >= bottommost_y)
678  bottommost_y = y + 1;
679  }
680 
681  input_data += input_linesize;
682  output_data += output_linesize;
683  }
684 
685  if (leftmost_x == input->width && rightmost_x == 0) {
686  // Empty frame
687  // APNG does not support empty frames, so we make it a 1x1 frame
688  leftmost_x = topmost_y = 0;
689  rightmost_x = bottommost_y = 1;
690  }
691 
692  // Do actual inverse blending
693  if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
694  output_data = output->data[0];
695  for (y = topmost_y; y < bottommost_y; ++y) {
696  memcpy(output_data,
697  input->data[0] + input_linesize * y + bpp * leftmost_x,
698  bpp * (rightmost_x - leftmost_x));
699  output_data += output_linesize;
700  }
701  } else { // APNG_BLEND_OP_OVER
702  size_t transparent_palette_index;
703  uint32_t *palette;
704 
705  switch (input->format) {
706  case AV_PIX_FMT_RGBA64BE:
707  case AV_PIX_FMT_YA16BE:
708  case AV_PIX_FMT_RGBA:
709  case AV_PIX_FMT_GRAY8A:
710  break;
711 
712  case AV_PIX_FMT_PAL8:
713  palette = (uint32_t*)input->data[1];
714  for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
715  if (palette[transparent_palette_index] >> 24 == 0)
716  break;
717  break;
718 
719  default:
720  // No alpha, so blending not possible
721  return -1;
722  }
723 
724  for (y = topmost_y; y < bottommost_y; ++y) {
725  const uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
726  uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
727  output_data = output->data[0] + output_linesize * (y - topmost_y);
728  for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
729  if (!memcmp(foreground, background, bpp)) {
730  if (input->format == AV_PIX_FMT_PAL8) {
731  if (transparent_palette_index == 256) {
732  // Need fully transparent colour, but none exists
733  return -1;
734  }
735 
736  *output_data = transparent_palette_index;
737  } else {
738  memset(output_data, 0, bpp);
739  }
740  continue;
741  }
742 
743  // Check for special alpha values, since full inverse
744  // alpha-on-alpha blending is rarely possible, and when
745  // possible, doesn't compress much better than
746  // APNG_BLEND_OP_SOURCE blending
747  switch (input->format) {
748  case AV_PIX_FMT_RGBA64BE:
749  if (((uint16_t*)foreground)[3] == 0xffff ||
750  ((uint16_t*)background)[3] == 0)
751  break;
752  return -1;
753 
754  case AV_PIX_FMT_YA16BE:
755  if (((uint16_t*)foreground)[1] == 0xffff ||
756  ((uint16_t*)background)[1] == 0)
757  break;
758  return -1;
759 
760  case AV_PIX_FMT_RGBA:
761  if (foreground[3] == 0xff || background[3] == 0)
762  break;
763  return -1;
764 
765  case AV_PIX_FMT_GRAY8A:
766  if (foreground[1] == 0xff || background[1] == 0)
767  break;
768  return -1;
769 
770  case AV_PIX_FMT_PAL8:
771  if (palette[*foreground] >> 24 == 0xff ||
772  palette[*background] >> 24 == 0)
773  break;
774  return -1;
775  }
776 
777  memmove(output_data, foreground, bpp);
778  }
779  }
780  }
781 
782  output->width = rightmost_x - leftmost_x;
783  output->height = bottommost_y - topmost_y;
784  fctl_chunk->width = output->width;
785  fctl_chunk->height = output->height;
786  fctl_chunk->x_offset = leftmost_x;
787  fctl_chunk->y_offset = topmost_y;
788 
789  return 0;
790 }
791 
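/*
 * For every frame after the first, try all combinations of the previous
 * frame's dispose_op (NONE, BACKGROUND, PREVIOUS) and this frame's blend_op
 * (SOURCE, OVER), encode each candidate and keep the smallest bitstream.
 * Sequence numbers and bytestream pointers are rewound between attempts so
 * only the winning candidate ends up in the output buffer.
 */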
792 static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
793  APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
794 {
795  PNGEncContext *s = avctx->priv_data;
796  int ret;
797  unsigned int y;
798  AVFrame* diffFrame;
799  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
800  uint8_t *original_bytestream, *original_bytestream_end;
801  uint8_t *temp_bytestream = 0, *temp_bytestream_end;
802  uint32_t best_sequence_number;
803  uint8_t *best_bytestream;
804  size_t best_bytestream_size = SIZE_MAX;
805  APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
806  APNGFctlChunk fctl_chunk = *best_fctl_chunk;
807 
808  if (avctx->frame_num == 0) {
809  best_fctl_chunk->width = pict->width;
810  best_fctl_chunk->height = pict->height;
811  best_fctl_chunk->x_offset = 0;
812  best_fctl_chunk->y_offset = 0;
813  best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
814  return encode_frame(avctx, pict);
815  }
816 
817  diffFrame = av_frame_alloc();
818  if (!diffFrame)
819  return AVERROR(ENOMEM);
820 
821  diffFrame->format = pict->format;
822  diffFrame->width = pict->width;
823  diffFrame->height = pict->height;
824  if ((ret = av_frame_get_buffer(diffFrame, 0)) < 0)
825  goto fail;
826 
827  original_bytestream = s->bytestream;
828  original_bytestream_end = s->bytestream_end;
829 
830  temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
831  if (!temp_bytestream) {
832  ret = AVERROR(ENOMEM);
833  goto fail;
834  }
835  temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);
836 
837  for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
838  // 0: APNG_DISPOSE_OP_NONE
839  // 1: APNG_DISPOSE_OP_BACKGROUND
840  // 2: APNG_DISPOSE_OP_PREVIOUS
841 
842  for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
843  // 0: APNG_BLEND_OP_SOURCE
844  // 1: APNG_BLEND_OP_OVER
845 
846  uint32_t original_sequence_number = s->sequence_number, sequence_number;
847  uint8_t *bytestream_start = s->bytestream;
848  size_t bytestream_size;
849 
850  // Do disposal
851  if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
852  diffFrame->width = pict->width;
853  diffFrame->height = pict->height;
854  ret = av_frame_copy(diffFrame, s->last_frame);
855  if (ret < 0)
856  goto fail;
857 
858  if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
859  for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
860  size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
861  memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
862  }
863  }
864  } else {
865  if (!s->prev_frame)
866  continue;
867 
868  diffFrame->width = pict->width;
869  diffFrame->height = pict->height;
870  ret = av_frame_copy(diffFrame, s->prev_frame);
871  if (ret < 0)
872  goto fail;
873  }
874 
875  // Do inverse blending
876  if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
877  continue;
878 
879  // Do encoding
880  ret = encode_frame(avctx, diffFrame);
881  sequence_number = s->sequence_number;
882  s->sequence_number = original_sequence_number;
883  bytestream_size = s->bytestream - bytestream_start;
884  s->bytestream = bytestream_start;
885  if (ret < 0)
886  goto fail;
887 
888  if (bytestream_size < best_bytestream_size) {
889  *best_fctl_chunk = fctl_chunk;
890  *best_last_fctl_chunk = last_fctl_chunk;
891 
892  best_sequence_number = sequence_number;
893  best_bytestream = s->bytestream;
894  best_bytestream_size = bytestream_size;
895 
896  if (best_bytestream == original_bytestream) {
897  s->bytestream = temp_bytestream;
898  s->bytestream_end = temp_bytestream_end;
899  } else {
900  s->bytestream = original_bytestream;
901  s->bytestream_end = original_bytestream_end;
902  }
903  }
904  }
905  }
906 
907  s->sequence_number = best_sequence_number;
908  s->bytestream = original_bytestream + best_bytestream_size;
909  s->bytestream_end = original_bytestream_end;
910  if (best_bytestream != original_bytestream)
911  memcpy(original_bytestream, best_bytestream, best_bytestream_size);
912 
913  ret = 0;
914 
915 fail:
916  av_freep(&temp_bytestream);
917  av_frame_free(&diffFrame);
918  return ret;
919 }
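/*
 * The APNG encoder runs one frame behind its input: the packet produced while
 * frame N is being submitted actually carries frame N-1, because N-1's fcTL
 * (dispose/blend choice) can only be finalized once frame N is known. The
 * last buffered frame is flushed when pict is NULL (AV_CODEC_CAP_DELAY).
 */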
920 
921 static int encode_apng(AVCodecContext *avctx, AVPacket *pkt,
922  const AVFrame *pict, int *got_packet)
923 {
924  PNGEncContext *s = avctx->priv_data;
925  int ret;
926  int enc_row_size;
927  uint64_t max_packet_size;
928  APNGFctlChunk fctl_chunk = {0};
929 
930  if (pict && s->color_type == PNG_COLOR_TYPE_PALETTE) {
931  uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));
932 
933  if (avctx->frame_num == 0) {
934  s->palette_checksum = checksum;
935  } else if (checksum != s->palette_checksum) {
936  av_log(avctx, AV_LOG_ERROR,
937  "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
938  return -1;
939  }
940  }
941 
942  enc_row_size = deflateBound(&s->zstream.zstream,
943  (avctx->width * s->bits_per_pixel + 7) >> 3);
944  max_packet_size =
945  AV_INPUT_BUFFER_MIN_SIZE + // headers
946  avctx->height * (
947  enc_row_size +
948  (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
949  );
950  if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
951  return ret;
952  if (max_packet_size > INT_MAX)
953  return AVERROR(ENOMEM);
954 
955  if (avctx->frame_num == 0) {
956  if (!pict)
957  return AVERROR(EINVAL);
958 
959  s->bytestream = s->extra_data = av_malloc(AV_INPUT_BUFFER_MIN_SIZE);
960  if (!s->extra_data)
961  return AVERROR(ENOMEM);
962 
963  ret = encode_headers(avctx, pict);
964  if (ret < 0)
965  return ret;
966 
967  s->extra_data_size = s->bytestream - s->extra_data;
968 
969  s->last_frame_packet = av_malloc(max_packet_size);
970  if (!s->last_frame_packet)
971  return AVERROR(ENOMEM);
972  } else if (s->last_frame) {
973  ret = ff_get_encode_buffer(avctx, pkt, s->last_frame_packet_size, 0);
974  if (ret < 0)
975  return ret;
976 
977  memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
978  pkt->pts = s->last_frame->pts;
979  pkt->duration = s->last_frame->duration;
980 
981  ret = ff_encode_reordered_opaque(avctx, pkt, s->last_frame);
982  if (ret < 0)
983  return ret;
984  }
985 
986  if (pict) {
987  s->bytestream_start =
988  s->bytestream = s->last_frame_packet;
989  s->bytestream_end = s->bytestream + max_packet_size;
990 
991  // We're encoding the frame first, so we have to do a bit of shuffling around
992  // to have the image data write to the correct place in the buffer
993  fctl_chunk.sequence_number = s->sequence_number;
994  ++s->sequence_number;
995  s->bytestream += APNG_FCTL_CHUNK_SIZE + 12;
996 
997  ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
998  if (ret < 0)
999  return ret;
1000 
1001  fctl_chunk.delay_num = 0; // delay filled in during muxing
1002  fctl_chunk.delay_den = 0;
1003  } else {
1004  s->last_frame_fctl.dispose_op = APNG_DISPOSE_OP_NONE;
1005  }
1006 
1007  if (s->last_frame) {
1008  uint8_t* last_fctl_chunk_start = pkt->data;
1009  uint8_t buf[APNG_FCTL_CHUNK_SIZE];
1010  if (!s->extra_data_updated) {
1011  uint8_t *side_data = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, s->extra_data_size);
1012  if (!side_data)
1013  return AVERROR(ENOMEM);
1014  memcpy(side_data, s->extra_data, s->extra_data_size);
1015  s->extra_data_updated = 1;
1016  }
1017 
1018  AV_WB32(buf + 0, s->last_frame_fctl.sequence_number);
1019  AV_WB32(buf + 4, s->last_frame_fctl.width);
1020  AV_WB32(buf + 8, s->last_frame_fctl.height);
1021  AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
1022  AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
1023  AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
1024  AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
1025  buf[24] = s->last_frame_fctl.dispose_op;
1026  buf[25] = s->last_frame_fctl.blend_op;
1027  png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, sizeof(buf));
1028 
1029  *got_packet = 1;
1030  }
1031 
1032  if (pict) {
1033  if (!s->last_frame) {
1034  s->last_frame = av_frame_alloc();
1035  if (!s->last_frame)
1036  return AVERROR(ENOMEM);
1037  } else if (s->last_frame_fctl.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
1038  if (!s->prev_frame) {
1039  s->prev_frame = av_frame_alloc();
1040  if (!s->prev_frame)
1041  return AVERROR(ENOMEM);
1042 
1043  s->prev_frame->format = pict->format;
1044  s->prev_frame->width = pict->width;
1045  s->prev_frame->height = pict->height;
1046  if ((ret = av_frame_get_buffer(s->prev_frame, 0)) < 0)
1047  return ret;
1048  }
1049 
1050  // Do disposal, but not blending
1051  av_frame_copy(s->prev_frame, s->last_frame);
1052  if (s->last_frame_fctl.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
1053  uint32_t y;
1054  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
1055  for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
1056  size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
1057  memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
1058  }
1059  }
1060  }
1061 
1062  ret = av_frame_replace(s->last_frame, pict);
1063  if (ret < 0)
1064  return ret;
1065 
1066  s->last_frame_fctl = fctl_chunk;
1067  s->last_frame_packet_size = s->bytestream - s->bytestream_start;
1068  } else {
1069  av_frame_free(&s->last_frame);
1070  }
1071 
1072  return 0;
1073 }
1074 
1075 static av_cold int png_enc_init(AVCodecContext *avctx)
1076 {
1077  PNGEncContext *s = avctx->priv_data;
1078  int compression_level;
1079 
1080  switch (avctx->pix_fmt) {
1081  case AV_PIX_FMT_RGBA:
1082  avctx->bits_per_coded_sample = 32;
1083  break;
1084  case AV_PIX_FMT_RGB24:
1085  avctx->bits_per_coded_sample = 24;
1086  break;
1087  case AV_PIX_FMT_GRAY8:
1088  avctx->bits_per_coded_sample = 8;
1089  break;
1090  case AV_PIX_FMT_MONOBLACK:
1091  avctx->bits_per_coded_sample = 1;
1092  break;
1093  case AV_PIX_FMT_PAL8:
1094  avctx->bits_per_coded_sample = 8;
1095  }
1096 
1097  ff_llvidencdsp_init(&s->llvidencdsp);
1098 
1099  if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
1100  s->filter_type = PNG_FILTER_VALUE_NONE;
1101 
1102  if (s->dpi && s->dpm) {
1103  av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
1104  return AVERROR(EINVAL);
1105  } else if (s->dpi) {
1106  s->dpm = s->dpi * 10000 / 254;
1107  }
1108 
1109  s->is_progressive = !!(avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT);
1110  switch (avctx->pix_fmt) {
1111  case AV_PIX_FMT_RGBA64BE:
1112  s->bit_depth = 16;
1113  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1114  break;
1115  case AV_PIX_FMT_RGB48BE:
1116  s->bit_depth = 16;
1117  s->color_type = PNG_COLOR_TYPE_RGB;
1118  break;
1119  case AV_PIX_FMT_RGBA:
1120  s->bit_depth = 8;
1121  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1122  break;
1123  case AV_PIX_FMT_RGB24:
1124  s->bit_depth = 8;
1125  s->color_type = PNG_COLOR_TYPE_RGB;
1126  break;
1127  case AV_PIX_FMT_GRAY16BE:
1128  s->bit_depth = 16;
1129  s->color_type = PNG_COLOR_TYPE_GRAY;
1130  break;
1131  case AV_PIX_FMT_GRAY8:
1132  s->bit_depth = 8;
1133  s->color_type = PNG_COLOR_TYPE_GRAY;
1134  break;
1135  case AV_PIX_FMT_GRAY8A:
1136  s->bit_depth = 8;
1137  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1138  break;
1139  case AV_PIX_FMT_YA16BE:
1140  s->bit_depth = 16;
1141  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1142  break;
1143  case AV_PIX_FMT_MONOBLACK:
1144  s->bit_depth = 1;
1145  s->color_type = PNG_COLOR_TYPE_GRAY;
1146  break;
1147  case AV_PIX_FMT_PAL8:
1148  s->bit_depth = 8;
1149  s->color_type = PNG_COLOR_TYPE_PALETTE;
1150  break;
1151  default:
1152  return -1;
1153  }
1154  s->bits_per_pixel = ff_png_get_nb_channels(s->color_type) * s->bit_depth;
1155 
1156  compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
1157  ? Z_DEFAULT_COMPRESSION
1158  : av_clip(avctx->compression_level, 0, 9);
1159  return ff_deflate_init(&s->zstream, compression_level, avctx);
1160 }
1161 
1162 static av_cold int png_enc_close(AVCodecContext *avctx)
1163 {
1164  PNGEncContext *s = avctx->priv_data;
1165 
1166  ff_deflate_end(&s->zstream);
1167  av_frame_free(&s->last_frame);
1168  av_frame_free(&s->prev_frame);
1169  av_freep(&s->last_frame_packet);
1170  av_freep(&s->extra_data);
1171  s->extra_data_size = 0;
1172  return 0;
1173 }
1174 
1175 #define OFFSET(x) offsetof(PNGEncContext, x)
1176 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
1177 static const AVOption options[] = {
1178  {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1179  {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1180  { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_NONE }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, "pred" },
1181  { "none", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE }, INT_MIN, INT_MAX, VE, "pred" },
1182  { "sub", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB }, INT_MIN, INT_MAX, VE, "pred" },
1183  { "up", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP }, INT_MIN, INT_MAX, VE, "pred" },
1184  { "avg", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG }, INT_MIN, INT_MAX, VE, "pred" },
1185  { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, "pred" },
1186  { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, "pred" },
1187  { NULL},
1188 };
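/*
 * Illustrative command line use of the options above (assuming a stock
 * ffmpeg build):
 *   ffmpeg -i input.bmp -c:v png -pred mixed -dpi 300 output.png
 */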
1189 
1190 static const AVClass pngenc_class = {
1191  .class_name = "(A)PNG encoder",
1192  .item_name = av_default_item_name,
1193  .option = options,
1194  .version = LIBAVUTIL_VERSION_INT,
1195 };
1196 
1197 const FFCodec ff_png_encoder = {
1198  .p.name = "png",
1199  CODEC_LONG_NAME("PNG (Portable Network Graphics) image"),
1200  .p.type = AVMEDIA_TYPE_VIDEO,
1201  .p.id = AV_CODEC_ID_PNG,
1202  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS |
1203  AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
1204  .priv_data_size = sizeof(PNGEncContext),
1205  .init = png_enc_init,
1206  .close = png_enc_close,
1207  FF_CODEC_ENCODE_CB(encode_png),
1208  .p.pix_fmts = (const enum AVPixelFormat[]) {
1209  AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
1210  AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
1211  AV_PIX_FMT_PAL8,
1212  AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
1213  AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
1214  AV_PIX_FMT_MONOBLACK, AV_PIX_FMT_NONE
1215  },
1216  .p.priv_class = &pngenc_class,
1217  .caps_internal = FF_CODEC_CAP_ICC_PROFILES,
1218 };
1219 
1220 const FFCodec ff_apng_encoder = {
1221  .p.name = "apng",
1222  CODEC_LONG_NAME("APNG (Animated Portable Network Graphics) image"),
1223  .p.type = AVMEDIA_TYPE_VIDEO,
1224  .p.id = AV_CODEC_ID_APNG,
1225  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY |
1226  AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
1227  .priv_data_size = sizeof(PNGEncContext),
1228  .init = png_enc_init,
1229  .close = png_enc_close,
1230  FF_CODEC_ENCODE_CB(encode_apng),
1231  .p.pix_fmts = (const enum AVPixelFormat[]) {
1232  AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
1233  AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
1234  AV_PIX_FMT_PAL8,
1235  AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
1236  AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
1237  AV_PIX_FMT_NONE
1238  },
1239  .p.priv_class = &pngenc_class,
1240  .caps_internal = FF_CODEC_CAP_ICC_PROFILES,
1241 };