FFmpeg
pngenc.c
Go to the documentation of this file.
1 /*
2  * PNG image format
3  * Copyright (c) 2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include "avcodec.h"
23 #include "internal.h"
24 #include "bytestream.h"
25 #include "lossless_videoencdsp.h"
26 #include "png.h"
27 #include "apng.h"
28 
29 #include "libavutil/avassert.h"
30 #include "libavutil/crc.h"
31 #include "libavutil/libm.h"
32 #include "libavutil/opt.h"
33 #include "libavutil/color_utils.h"
34 #include "libavutil/stereo3d.h"
35 
36 #include <zlib.h>
37 
38 #define IOBUF_SIZE 4096
39 
/**
 * Contents of an APNG fcTL (frame control) chunk, in on-disk field order.
 * Describes placement, timing and blend/dispose behaviour of one frame.
 */
typedef struct APNGFctlChunk {
    uint32_t sequence_number;
    uint32_t width, height;        ///< size of the region stored for this frame
    uint32_t x_offset, y_offset;   ///< placement of that region on the canvas
    uint16_t delay_num, delay_den; ///< frame delay fraction (filled in by the muxer)
    // NOTE(review): the two fields below and the closing brace were elided
    // by the extraction; they are required by every user of this struct
    // (dispose_op/blend_op accesses throughout this file).
    uint8_t dispose_op, blend_op;
} APNGFctlChunk;
47 
typedef struct PNGEncContext {
    AVClass *class;              // AVOptions context for the options table below

    // NOTE(review): many members were elided by the extraction (bytestream
    // pointers, io buffer, DSP context, filter/colour settings, APNG frame
    // cache); only the fields below survived. Confirm against upstream.

    z_stream zstream;            // zlib state, reused across frames (deflateReset in encode_frame)
    int dpi;                     ///< Physical pixel density, in dots per inch, if set
    int dpm;                     ///< Physical pixel density, in dots per meter, if set

    int bit_depth;               // PNG sample bit depth written into IHDR

    // APNG
    uint32_t palette_checksum;   // Used to ensure a single unique palette
    uint32_t sequence_number;    // next fcTL/fdAT sequence number to emit
74 
81 
/**
 * Extract the pixels of one Adam7 pass from a full source row.
 *
 * @param dst            destination for the pass's pixels, packed tightly
 * @param row_size       number of bytes to clear in dst (1-bpp case only)
 * @param bits_per_pixel 1 for monochrome, otherwise a multiple of 8
 * @param pass           Adam7 pass index (0..6), selects the column mask
 * @param src            full source row
 * @param width          width of the source row in pixels
 */
static void png_get_interlaced_row(uint8_t *dst, int row_size,
                                   int bits_per_pixel, int pass,
                                   const uint8_t *src, int width)
{
    /* Per-pass column-selection masks: bit (7 - (x & 7)) set means the
     * column belongs to this pass. */
    static const int masks[] = { 0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff };
    const int mask = masks[pass];

    if (bits_per_pixel == 1) {
        /* Bit-packed monochrome: gather selected bits MSB-first. */
        int out_bit = 0;
        memset(dst, 0, row_size);
        for (int x = 0; x < width; x++) {
            const int phase = x & 7;
            if ((mask << phase) & 0x80) {
                const int bit = (src[x >> 3] >> (7 - phase)) & 1;
                dst[out_bit >> 3] |= bit << (7 - (out_bit & 7));
                out_bit++;
            }
        }
    } else {
        /* Byte-aligned pixels: copy the selected ones contiguously. */
        const int pixel_bytes = bits_per_pixel >> 3;
        const uint8_t *in = src;
        uint8_t *out = dst;
        for (int x = 0; x < width; x++) {
            if ((mask << (x & 7)) & 0x80) {
                memcpy(out, in, pixel_bytes);
                out += pixel_bytes;
            }
            in += pixel_bytes;
        }
    }
}
120 
/**
 * Apply the PNG Paeth predictor and store the residuals.
 *
 * For each byte the predictor is whichever of left (a), above (b) or
 * upper-left (c) is closest to a + b - c; dst[i] = src[i] - predictor.
 * Callers pass pointers already advanced by bpp bytes, so the negative
 * indices src[i - bpp] / top[i - bpp] are valid.
 *
 * NOTE(review): the opening signature line was elided by the extraction;
 * it has been restored to match the call site in png_filter_row().
 */
static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
                                     int w, int bpp)
{
    int i;
    for (i = 0; i < w; i++) {
        int a, b, c, p, pa, pb, pc;

        a = src[i - bpp];
        b = top[i];
        c = top[i - bpp];

        /* pa/pb/pc end up as the distances of a + b - c to a, b and c. */
        p  = b - c;
        pc = a - c;

        pa = abs(p);
        pb = abs(pc);
        pc = abs(p + pc);

        /* Ties resolve in the order a, b, c. */
        if (pa <= pb && pa <= pc)
            p = a;
        else if (pb <= pc)
            p = b;
        else
            p = c;
        dst[i] = src[i] - p;
    }
}
148 
149 static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
150 {
151  const uint8_t *src1 = src + bpp;
152  const uint8_t *src2 = src;
153  int x, unaligned_w;
154 
155  memcpy(dst, src, bpp);
156  dst += bpp;
157  size -= bpp;
158  unaligned_w = FFMIN(32 - bpp, size);
159  for (x = 0; x < unaligned_w; x++)
160  *dst++ = *src1++ - *src2++;
161  size -= unaligned_w;
162  c->llvidencdsp.diff_bytes(dst, src1, src2, size);
163 }
164 
165 static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
166  uint8_t *src, uint8_t *top, int size, int bpp)
167 {
168  int i;
169 
170  switch (filter_type) {
172  memcpy(dst, src, size);
173  break;
175  sub_left_prediction(c, dst, src, bpp, size);
176  break;
177  case PNG_FILTER_VALUE_UP:
178  c->llvidencdsp.diff_bytes(dst, src, top, size);
179  break;
181  for (i = 0; i < bpp; i++)
182  dst[i] = src[i] - (top[i] >> 1);
183  for (; i < size; i++)
184  dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
185  break;
187  for (i = 0; i < bpp; i++)
188  dst[i] = src[i] - top[i];
189  sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
190  break;
191  }
192 }
193 
195  uint8_t *src, uint8_t *top, int size, int bpp)
196 {
197  int pred = s->filter_type;
198  av_assert0(bpp || !pred);
199  if (!top && pred)
200  pred = PNG_FILTER_VALUE_SUB;
201  if (pred == PNG_FILTER_VALUE_MIXED) {
202  int i;
203  int cost, bcost = INT_MAX;
204  uint8_t *buf1 = dst, *buf2 = dst + size + 16;
205  for (pred = 0; pred < 5; pred++) {
206  png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
207  buf1[0] = pred;
208  cost = 0;
209  for (i = 0; i <= size; i++)
210  cost += abs((int8_t) buf1[i]);
211  if (cost < bcost) {
212  bcost = cost;
213  FFSWAP(uint8_t *, buf1, buf2);
214  }
215  }
216  return buf2;
217  } else {
218  png_filter_row(s, dst + 1, pred, src, top, size, bpp);
219  dst[0] = pred;
220  return dst;
221  }
222 }
223 
224 static void png_write_chunk(uint8_t **f, uint32_t tag,
225  const uint8_t *buf, int length)
226 {
227  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
228  uint32_t crc = ~0U;
229  uint8_t tagbuf[4];
230 
231  bytestream_put_be32(f, length);
232  AV_WL32(tagbuf, tag);
233  crc = av_crc(crc_table, crc, tagbuf, 4);
234  bytestream_put_be32(f, av_bswap32(tag));
235  if (length > 0) {
236  crc = av_crc(crc_table, crc, buf, length);
237  memcpy(*f, buf, length);
238  *f += length;
239  }
240  bytestream_put_be32(f, ~crc);
241 }
242 
244  const uint8_t *buf, int length)
245 {
246  PNGEncContext *s = avctx->priv_data;
247  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
248  uint32_t crc = ~0U;
249 
250  if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_number == 0) {
251  png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
252  return;
253  }
254 
255  bytestream_put_be32(&s->bytestream, length + 4);
256 
257  bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
258  bytestream_put_be32(&s->bytestream, s->sequence_number);
259  crc = av_crc(crc_table, crc, s->bytestream - 8, 8);
260 
261  crc = av_crc(crc_table, crc, buf, length);
262  memcpy(s->bytestream, buf, length);
263  s->bytestream += length;
264 
265  bytestream_put_be32(&s->bytestream, ~crc);
266 
267  ++s->sequence_number;
268 }
269 
270 /* XXX: do filtering */
271 static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
272 {
273  PNGEncContext *s = avctx->priv_data;
274  int ret;
275 
276  s->zstream.avail_in = size;
277  s->zstream.next_in = data;
278  while (s->zstream.avail_in > 0) {
279  ret = deflate(&s->zstream, Z_NO_FLUSH);
280  if (ret != Z_OK)
281  return -1;
282  if (s->zstream.avail_out == 0) {
283  if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
284  png_write_image_data(avctx, s->buf, IOBUF_SIZE);
285  s->zstream.avail_out = IOBUF_SIZE;
286  s->zstream.next_out = s->buf;
287  }
288  }
289  return 0;
290 }
291 
292 #define AV_WB32_PNG(buf, n) AV_WB32(buf, lrint((n) * 100000))
293 static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
294 {
295  double rx, ry, gx, gy, bx, by, wx = 0.3127, wy = 0.3290;
296  switch (prim) {
297  case AVCOL_PRI_BT709:
298  rx = 0.640; ry = 0.330;
299  gx = 0.300; gy = 0.600;
300  bx = 0.150; by = 0.060;
301  break;
302  case AVCOL_PRI_BT470M:
303  rx = 0.670; ry = 0.330;
304  gx = 0.210; gy = 0.710;
305  bx = 0.140; by = 0.080;
306  wx = 0.310; wy = 0.316;
307  break;
308  case AVCOL_PRI_BT470BG:
309  rx = 0.640; ry = 0.330;
310  gx = 0.290; gy = 0.600;
311  bx = 0.150; by = 0.060;
312  break;
313  case AVCOL_PRI_SMPTE170M:
314  case AVCOL_PRI_SMPTE240M:
315  rx = 0.630; ry = 0.340;
316  gx = 0.310; gy = 0.595;
317  bx = 0.155; by = 0.070;
318  break;
319  case AVCOL_PRI_BT2020:
320  rx = 0.708; ry = 0.292;
321  gx = 0.170; gy = 0.797;
322  bx = 0.131; by = 0.046;
323  break;
324  default:
325  return 0;
326  }
327 
328  AV_WB32_PNG(buf , wx); AV_WB32_PNG(buf + 4 , wy);
329  AV_WB32_PNG(buf + 8 , rx); AV_WB32_PNG(buf + 12, ry);
330  AV_WB32_PNG(buf + 16, gx); AV_WB32_PNG(buf + 20, gy);
331  AV_WB32_PNG(buf + 24, bx); AV_WB32_PNG(buf + 28, by);
332  return 1;
333 }
334 
336 {
337  double gamma = avpriv_get_gamma_from_trc(trc);
338  if (gamma <= 1e-6)
339  return 0;
340 
341  AV_WB32_PNG(buf, 1.0 / gamma);
342  return 1;
343 }
344 
345 static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
346 {
347  AVFrameSideData *side_data;
348  PNGEncContext *s = avctx->priv_data;
349 
350  /* write png header */
351  AV_WB32(s->buf, avctx->width);
352  AV_WB32(s->buf + 4, avctx->height);
353  s->buf[8] = s->bit_depth;
354  s->buf[9] = s->color_type;
355  s->buf[10] = 0; /* compression type */
356  s->buf[11] = 0; /* filter type */
357  s->buf[12] = s->is_progressive; /* interlace type */
358  png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);
359 
360  /* write physical information */
361  if (s->dpm) {
362  AV_WB32(s->buf, s->dpm);
363  AV_WB32(s->buf + 4, s->dpm);
364  s->buf[8] = 1; /* unit specifier is meter */
365  } else {
366  AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
367  AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
368  s->buf[8] = 0; /* unit specifier is unknown */
369  }
370  png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);
371 
372  /* write stereoscopic information */
374  if (side_data) {
375  AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
376  switch (stereo3d->type) {
378  s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
379  png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
380  break;
381  case AV_STEREO3D_2D:
382  break;
383  default:
384  av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
385  break;
386  }
387  }
388 
389  /* write colorspace information */
390  if (pict->color_primaries == AVCOL_PRI_BT709 &&
392  s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
393  png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
394  }
395 
396  if (png_get_chrm(pict->color_primaries, s->buf))
397  png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
398  if (png_get_gama(pict->color_trc, s->buf))
399  png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);
400 
401  /* put the palette if needed */
403  int has_alpha, alpha, i;
404  unsigned int v;
405  uint32_t *palette;
406  uint8_t *ptr, *alpha_ptr;
407 
408  palette = (uint32_t *)pict->data[1];
409  ptr = s->buf;
410  alpha_ptr = s->buf + 256 * 3;
411  has_alpha = 0;
412  for (i = 0; i < 256; i++) {
413  v = palette[i];
414  alpha = v >> 24;
415  if (alpha != 0xff)
416  has_alpha = 1;
417  *alpha_ptr++ = alpha;
418  bytestream_put_be24(&ptr, v);
419  }
421  MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
422  if (has_alpha) {
424  MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
425  }
426  }
427 
428  return 0;
429 }
430 
/* Deflate the entire image of pict through png_write_row(), interlaced
 * (Adam7) or not, then flush zlib. Header chunks must already have been
 * written. Returns 0 on success or a negative error code. */
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
{
    PNGEncContext *s = avctx->priv_data;
    const AVFrame *const p = pict;
    int y, len, ret;
    int row_size, pass_row_size;
    uint8_t *ptr, *top, *crow_buf, *crow;
    uint8_t *crow_base = NULL;
    uint8_t *progressive_buf = NULL;
    uint8_t *top_buf = NULL;

    row_size = (pict->width * s->bits_per_pixel + 7) >> 3;

    /* MIXED filtering needs space for two candidate buffers, hence the
     * doubling (see png_choose_filter) */
    crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
    if (!crow_base) {
        ret = AVERROR(ENOMEM);
        goto the_end;
    }
    // pixel data should be aligned, but there's a control byte before it
    crow_buf = crow_base + 15;
    if (s->is_progressive) {
        /* two row buffers, swapped each row so "top" stays valid */
        progressive_buf = av_malloc(row_size + 1);
        top_buf = av_malloc(row_size + 1);
        if (!progressive_buf || !top_buf) {
            ret = AVERROR(ENOMEM);
            goto the_end;
        }
    }

    /* put each row */
    s->zstream.avail_out = IOBUF_SIZE;
    s->zstream.next_out = s->buf;
    if (s->is_progressive) {
        int pass;

        for (pass = 0; pass < NB_PASSES; pass++) {
            /* NOTE: a pass is completely omitted if no pixels would be
             * output */
            pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
            if (pass_row_size > 0) {
                top = NULL;
                for (y = 0; y < pict->height; y++)
                    if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
                        ptr = p->data[0] + y * p->linesize[0];
                        FFSWAP(uint8_t *, progressive_buf, top_buf);
                        png_get_interlaced_row(progressive_buf, pass_row_size,
                                               s->bits_per_pixel, pass,
                                               ptr, pict->width);
                        crow = png_choose_filter(s, crow_buf, progressive_buf,
                                                 top, pass_row_size, s->bits_per_pixel >> 3);
                        png_write_row(avctx, crow, pass_row_size + 1);
                        top = progressive_buf;
                    }
            }
        }
    } else {
        top = NULL;
        for (y = 0; y < pict->height; y++) {
            ptr = p->data[0] + y * p->linesize[0];
            crow = png_choose_filter(s, crow_buf, ptr, top,
                                     row_size, s->bits_per_pixel >> 3);
            png_write_row(avctx, crow, row_size + 1);
            top = ptr;
        }
    }
    /* compress last bytes */
    for (;;) {
        ret = deflate(&s->zstream, Z_FINISH);
        if (ret == Z_OK || ret == Z_STREAM_END) {
            len = IOBUF_SIZE - s->zstream.avail_out;
            /* flush only if the packet still has room for chunk overhead */
            if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
                png_write_image_data(avctx, s->buf, len);
            }
            s->zstream.avail_out = IOBUF_SIZE;
            s->zstream.next_out = s->buf;
            if (ret == Z_STREAM_END)
                break;
        } else {
            ret = -1;
            goto the_end;
        }
    }

    ret = 0;

the_end:
    av_freep(&crow_base);
    av_freep(&progressive_buf);
    av_freep(&top_buf);
    /* keep the zlib context reusable for the next frame */
    deflateReset(&s->zstream);
    return ret;
}
523 
524 static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
525  const AVFrame *pict, int *got_packet)
526 {
527  PNGEncContext *s = avctx->priv_data;
528  int ret;
529  int enc_row_size;
530  size_t max_packet_size;
531 
532  enc_row_size = deflateBound(&s->zstream, (avctx->width * s->bits_per_pixel + 7) >> 3);
533  max_packet_size =
534  AV_INPUT_BUFFER_MIN_SIZE + // headers
535  avctx->height * (
536  enc_row_size +
537  12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
538  );
539  if (max_packet_size > INT_MAX)
540  return AVERROR(ENOMEM);
541  ret = ff_alloc_packet2(avctx, pkt, max_packet_size, 0);
542  if (ret < 0)
543  return ret;
544 
545  s->bytestream_start =
546  s->bytestream = pkt->data;
547  s->bytestream_end = pkt->data + pkt->size;
548 
550  s->bytestream += 8;
551 
552  ret = encode_headers(avctx, pict);
553  if (ret < 0)
554  return ret;
555 
556  ret = encode_frame(avctx, pict);
557  if (ret < 0)
558  return ret;
559 
560  png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);
561 
562  pkt->size = s->bytestream - s->bytestream_start;
563  pkt->flags |= AV_PKT_FLAG_KEY;
564  *got_packet = 1;
565 
566  return 0;
567 }
568 
570  APNGFctlChunk *fctl_chunk, uint8_t bpp)
571 {
572  // output: background, input: foreground
573  // output the image such that when blended with the background, will produce the foreground
574 
575  unsigned int x, y;
576  unsigned int leftmost_x = input->width;
577  unsigned int rightmost_x = 0;
578  unsigned int topmost_y = input->height;
579  unsigned int bottommost_y = 0;
580  const uint8_t *input_data = input->data[0];
581  uint8_t *output_data = output->data[0];
582  ptrdiff_t input_linesize = input->linesize[0];
583  ptrdiff_t output_linesize = output->linesize[0];
584 
585  // Find bounding box of changes
586  for (y = 0; y < input->height; ++y) {
587  for (x = 0; x < input->width; ++x) {
588  if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
589  continue;
590 
591  if (x < leftmost_x)
592  leftmost_x = x;
593  if (x >= rightmost_x)
594  rightmost_x = x + 1;
595  if (y < topmost_y)
596  topmost_y = y;
597  if (y >= bottommost_y)
598  bottommost_y = y + 1;
599  }
600 
601  input_data += input_linesize;
602  output_data += output_linesize;
603  }
604 
605  if (leftmost_x == input->width && rightmost_x == 0) {
606  // Empty frame
607  // APNG does not support empty frames, so we make it a 1x1 frame
608  leftmost_x = topmost_y = 0;
609  rightmost_x = bottommost_y = 1;
610  }
611 
612  // Do actual inverse blending
613  if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
614  output_data = output->data[0];
615  for (y = topmost_y; y < bottommost_y; ++y) {
616  memcpy(output_data,
617  input->data[0] + input_linesize * y + bpp * leftmost_x,
618  bpp * (rightmost_x - leftmost_x));
619  output_data += output_linesize;
620  }
621  } else { // APNG_BLEND_OP_OVER
622  size_t transparent_palette_index;
623  uint32_t *palette;
624 
625  switch (input->format) {
626  case AV_PIX_FMT_RGBA64BE:
627  case AV_PIX_FMT_YA16BE:
628  case AV_PIX_FMT_RGBA:
629  case AV_PIX_FMT_GRAY8A:
630  break;
631 
632  case AV_PIX_FMT_PAL8:
633  palette = (uint32_t*)input->data[1];
634  for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
635  if (palette[transparent_palette_index] >> 24 == 0)
636  break;
637  break;
638 
639  default:
640  // No alpha, so blending not possible
641  return -1;
642  }
643 
644  for (y = topmost_y; y < bottommost_y; ++y) {
645  uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
646  uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
647  output_data = output->data[0] + output_linesize * (y - topmost_y);
648  for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
649  if (!memcmp(foreground, background, bpp)) {
650  if (input->format == AV_PIX_FMT_PAL8) {
651  if (transparent_palette_index == 256) {
652  // Need fully transparent colour, but none exists
653  return -1;
654  }
655 
656  *output_data = transparent_palette_index;
657  } else {
658  memset(output_data, 0, bpp);
659  }
660  continue;
661  }
662 
663  // Check for special alpha values, since full inverse
664  // alpha-on-alpha blending is rarely possible, and when
665  // possible, doesn't compress much better than
666  // APNG_BLEND_OP_SOURCE blending
667  switch (input->format) {
668  case AV_PIX_FMT_RGBA64BE:
669  if (((uint16_t*)foreground)[3] == 0xffff ||
670  ((uint16_t*)background)[3] == 0)
671  break;
672  return -1;
673 
674  case AV_PIX_FMT_YA16BE:
675  if (((uint16_t*)foreground)[1] == 0xffff ||
676  ((uint16_t*)background)[1] == 0)
677  break;
678  return -1;
679 
680  case AV_PIX_FMT_RGBA:
681  if (foreground[3] == 0xff || background[3] == 0)
682  break;
683  return -1;
684 
685  case AV_PIX_FMT_GRAY8A:
686  if (foreground[1] == 0xff || background[1] == 0)
687  break;
688  return -1;
689 
690  case AV_PIX_FMT_PAL8:
691  if (palette[*foreground] >> 24 == 0xff ||
692  palette[*background] >> 24 == 0)
693  break;
694  return -1;
695  }
696 
697  memmove(output_data, foreground, bpp);
698  }
699  }
700  }
701 
702  output->width = rightmost_x - leftmost_x;
703  output->height = bottommost_y - topmost_y;
704  fctl_chunk->width = output->width;
705  fctl_chunk->height = output->height;
706  fctl_chunk->x_offset = leftmost_x;
707  fctl_chunk->y_offset = topmost_y;
708 
709  return 0;
710 }
711 
712 static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
713  APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
714 {
715  PNGEncContext *s = avctx->priv_data;
716  int ret;
717  unsigned int y;
718  AVFrame* diffFrame;
719  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
720  uint8_t *original_bytestream, *original_bytestream_end;
721  uint8_t *temp_bytestream = 0, *temp_bytestream_end;
722  uint32_t best_sequence_number;
723  uint8_t *best_bytestream;
724  size_t best_bytestream_size = SIZE_MAX;
725  APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
726  APNGFctlChunk fctl_chunk = *best_fctl_chunk;
727 
728  if (avctx->frame_number == 0) {
729  best_fctl_chunk->width = pict->width;
730  best_fctl_chunk->height = pict->height;
731  best_fctl_chunk->x_offset = 0;
732  best_fctl_chunk->y_offset = 0;
733  best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
734  return encode_frame(avctx, pict);
735  }
736 
737  diffFrame = av_frame_alloc();
738  if (!diffFrame)
739  return AVERROR(ENOMEM);
740 
741  diffFrame->format = pict->format;
742  diffFrame->width = pict->width;
743  diffFrame->height = pict->height;
744  if ((ret = av_frame_get_buffer(diffFrame, 32)) < 0)
745  goto fail;
746 
747  original_bytestream = s->bytestream;
748  original_bytestream_end = s->bytestream_end;
749 
750  temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
751  if (!temp_bytestream) {
752  ret = AVERROR(ENOMEM);
753  goto fail;
754  }
755  temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);
756 
757  for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
758  // 0: APNG_DISPOSE_OP_NONE
759  // 1: APNG_DISPOSE_OP_BACKGROUND
760  // 2: APNG_DISPOSE_OP_PREVIOUS
761 
762  for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
763  // 0: APNG_BLEND_OP_SOURCE
764  // 1: APNG_BLEND_OP_OVER
765 
766  uint32_t original_sequence_number = s->sequence_number, sequence_number;
767  uint8_t *bytestream_start = s->bytestream;
768  size_t bytestream_size;
769 
770  // Do disposal
771  if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
772  diffFrame->width = pict->width;
773  diffFrame->height = pict->height;
774  ret = av_frame_copy(diffFrame, s->last_frame);
775  if (ret < 0)
776  goto fail;
777 
778  if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
779  for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
780  size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
781  memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
782  }
783  }
784  } else {
785  if (!s->prev_frame)
786  continue;
787 
788  diffFrame->width = pict->width;
789  diffFrame->height = pict->height;
790  ret = av_frame_copy(diffFrame, s->prev_frame);
791  if (ret < 0)
792  goto fail;
793  }
794 
795  // Do inverse blending
796  if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
797  continue;
798 
799  // Do encoding
800  ret = encode_frame(avctx, diffFrame);
802  s->sequence_number = original_sequence_number;
803  bytestream_size = s->bytestream - bytestream_start;
804  s->bytestream = bytestream_start;
805  if (ret < 0)
806  goto fail;
807 
808  if (bytestream_size < best_bytestream_size) {
809  *best_fctl_chunk = fctl_chunk;
810  *best_last_fctl_chunk = last_fctl_chunk;
811 
812  best_sequence_number = sequence_number;
813  best_bytestream = s->bytestream;
814  best_bytestream_size = bytestream_size;
815 
816  if (best_bytestream == original_bytestream) {
817  s->bytestream = temp_bytestream;
818  s->bytestream_end = temp_bytestream_end;
819  } else {
820  s->bytestream = original_bytestream;
821  s->bytestream_end = original_bytestream_end;
822  }
823  }
824  }
825  }
826 
827  s->sequence_number = best_sequence_number;
828  s->bytestream = original_bytestream + best_bytestream_size;
829  s->bytestream_end = original_bytestream_end;
830  if (best_bytestream != original_bytestream)
831  memcpy(original_bytestream, best_bytestream, best_bytestream_size);
832 
833  ret = 0;
834 
835 fail:
836  av_freep(&temp_bytestream);
837  av_frame_free(&diffFrame);
838  return ret;
839 }
840 
/* NOTE(review): the opening line of this definition (the encode_apng
 * signature taking AVCodecContext *avctx and AVPacket *pkt) was elided
 * by the extraction, as were several lines marked below; verify each
 * marked elision against upstream pngenc.c.
 * APNG frames are emitted one behind: each call encodes the current pict
 * into a buffer and outputs the PREVIOUS frame's packet, completing its
 * fcTL once the next frame's existence/timing is known. */
                      const AVFrame *pict, int *got_packet)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;
    int enc_row_size;
    size_t max_packet_size;
    APNGFctlChunk fctl_chunk = {0};

    /* APNG allows only a single palette for the whole animation */
    if (pict && avctx->codec_id == AV_CODEC_ID_APNG && s->color_type == PNG_COLOR_TYPE_PALETTE) {
        uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));

        if (avctx->frame_number == 0) {
            /* NOTE(review): elided line here — presumably stores checksum
             * into s->palette_checksum; confirm against upstream */
        } else if (checksum != s->palette_checksum) {
            av_log(avctx, AV_LOG_ERROR,
                   "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
            return -1;
        }
    }

    /* worst-case packet: headers + compressed rows + fdAT overhead */
    enc_row_size = deflateBound(&s->zstream, (avctx->width * s->bits_per_pixel + 7) >> 3);
    max_packet_size =
        AV_INPUT_BUFFER_MIN_SIZE + // headers
        avctx->height * (
            enc_row_size +
            (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
        );
    if (max_packet_size > INT_MAX)
        return AVERROR(ENOMEM);

    if (avctx->frame_number == 0) {
        if (!pict)
            return AVERROR(EINVAL);

        /* NOTE(review): elided line here — allocation of s->extra_data
         * (also targeted by s->bytestream); confirm against upstream */
        if (!s->extra_data)
            return AVERROR(ENOMEM);

        ret = encode_headers(avctx, pict);
        if (ret < 0)
            return ret;

        /* NOTE(review): elided line here — records s->extra_data_size */

        s->last_frame_packet = av_malloc(max_packet_size);
        if (!s->last_frame_packet)
            return AVERROR(ENOMEM);
    } else if (s->last_frame) {
        /* emit the buffered previous frame as this call's output packet */
        ret = ff_alloc_packet2(avctx, pkt, max_packet_size, 0);
        if (ret < 0)
            return ret;

        memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
        pkt->size = s->last_frame_packet_size;
        pkt->pts = pkt->dts = s->last_frame->pts;
    }

    if (pict) {
        s->bytestream_start =
        /* NOTE(review): elided continuation here — the assignment of
         * s->bytestream to the scratch buffer; confirm against upstream */
        s->bytestream_end = s->bytestream + max_packet_size;

        // We're encoding the frame first, so we have to do a bit of shuffling around
        // to have the image data write to the correct place in the buffer
        fctl_chunk.sequence_number = s->sequence_number;
        ++s->sequence_number;
        s->bytestream += 26 + 12; /* leave room for the 26-byte fcTL + chunk overhead */

        ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
        if (ret < 0)
            return ret;

        fctl_chunk.delay_num = 0; // delay filled in during muxing
        fctl_chunk.delay_den = 0;
    } else {
        /* NOTE(review): elided line here (flush-path bookkeeping);
         * confirm against upstream */
    }

    if (s->last_frame) {
        uint8_t* last_fctl_chunk_start = pkt->data;
        uint8_t buf[26];
        if (!s->extra_data_updated) {
            /* NOTE(review): elided line here — allocation of packet side
             * data (new extradata) into side_data; confirm against upstream */
            if (!side_data)
                return AVERROR(ENOMEM);
            memcpy(side_data, s->extra_data, s->extra_data_size);
            s->extra_data_updated = 1;
        }

        /* serialize the previous frame's fcTL (26 bytes, on-disk order) */
        /* NOTE(review): elided line here — writes the sequence number at
         * buf + 0; confirm against upstream */
        AV_WB32(buf + 4, s->last_frame_fctl.width);
        AV_WB32(buf + 8, s->last_frame_fctl.height);
        AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
        AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
        AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
        AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
        buf[24] = s->last_frame_fctl.dispose_op;
        buf[25] = s->last_frame_fctl.blend_op;
        png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, 26);

        *got_packet = 1;
    }

    if (pict) {
        if (!s->last_frame) {
            s->last_frame = av_frame_alloc();
            if (!s->last_frame)
                return AVERROR(ENOMEM);
        /* NOTE(review): elided line here — the "} else if (...)" branch
         * header gating the disposal below; confirm against upstream */
            if (!s->prev_frame) {
                s->prev_frame = av_frame_alloc();
                if (!s->prev_frame)
                    return AVERROR(ENOMEM);

                s->prev_frame->format = pict->format;
                s->prev_frame->width = pict->width;
                s->prev_frame->height = pict->height;
                if ((ret = av_frame_get_buffer(s->prev_frame, 32)) < 0)
                    return ret;
            }

            // Do disposal, but not blending
            /* NOTE(review): elided lines here — the frame copy into
             * prev_frame and the dispose_op test opening the block below;
             * confirm against upstream */
                uint32_t y;
                uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
                for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
                    size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
                    memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
                }
            }
        }

        /* NOTE(review): elided line here — presumably unrefs s->last_frame
         * before re-referencing; confirm against upstream */
        ret = av_frame_ref(s->last_frame, (AVFrame*)pict);
        if (ret < 0)
            return ret;

        s->last_frame_fctl = fctl_chunk;
        /* NOTE(review): elided line here — presumably sets the cached
         * frame's pts; confirm against upstream */
    } else {
        /* NOTE(review): elided line here — presumably frees s->last_frame
         * on flush; confirm against upstream */
    }

    return 0;
}
988 
/* NOTE(review): the png_enc_init signature line was elided by the
 * extraction, as were several lines marked below (deprecation-guarded
 * statements, color_type assignments, bits_per_pixel derivation);
 * verify each marked elision against upstream pngenc.c. */
{
    PNGEncContext *s = avctx->priv_data;
    int compression_level;

    /* advertised coded bits per sample, by input pixel format */
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGBA:
        avctx->bits_per_coded_sample = 32;
        break;
    case AV_PIX_FMT_RGB24:
        avctx->bits_per_coded_sample = 24;
        break;
    case AV_PIX_FMT_GRAY8:
        avctx->bits_per_coded_sample = 0x28; /* NOTE(review): 0x28 mirrors upstream; historical value */
        break;
    case AV_PIX_FMT_MONOBLACK:
        avctx->bits_per_coded_sample = 1;
        break;
    case AV_PIX_FMT_PAL8:
        avctx->bits_per_coded_sample = 8;
    }

#if FF_API_CODED_FRAME
    /* NOTE(review): elided lines around this block (deprecation-warning
     * guards and a pict_type assignment); confirm against upstream */
    avctx->coded_frame->key_frame = 1;
#endif

    /* NOTE(review): elided line here — presumably initializes the
     * lossless-videoenc DSP context; confirm against upstream */

#if FF_API_PRIVATE_OPT
    /* NOTE(review): elided lines in this block (deprecation guards and
     * the av_clip bounds/closing of the statement below) */
    if (avctx->prediction_method)
        s->filter_type = av_clip(avctx->prediction_method,
#endif

    if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
    /* NOTE(review): elided line here — the forced filter_type for
     * monochrome input; confirm against upstream */

    if (s->dpi && s->dpm) {
        av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
        return AVERROR(EINVAL);
    } else if (s->dpi) {
        /* 1 inch = 2.54 cm: convert dots/inch to dots/meter */
        s->dpm = s->dpi * 10000 / 254;
    }

    /* NOTE(review): elided line here — is_progressive setup; confirm */
    /* pick PNG bit depth (and, in elided lines, color type) per format */
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGBA64BE:
        s->bit_depth = 16;
        break;
    case AV_PIX_FMT_RGB48BE:
        s->bit_depth = 16;
        break;
    case AV_PIX_FMT_RGBA:
        s->bit_depth = 8;
        break;
    case AV_PIX_FMT_RGB24:
        s->bit_depth = 8;
        break;
    case AV_PIX_FMT_GRAY16BE:
        s->bit_depth = 16;
        break;
    case AV_PIX_FMT_GRAY8:
        s->bit_depth = 8;
        break;
    case AV_PIX_FMT_GRAY8A:
        s->bit_depth = 8;
        break;
    case AV_PIX_FMT_YA16BE:
        s->bit_depth = 16;
        break;
    case AV_PIX_FMT_MONOBLACK:
        s->bit_depth = 1;
        break;
    case AV_PIX_FMT_PAL8:
        s->bit_depth = 8;
        break;
    default:
        return -1;
    }
    /* NOTE(review): elided line here — bits_per_pixel derivation from
     * color type and bit depth; confirm against upstream */

    s->zstream.zalloc = ff_png_zalloc;
    s->zstream.zfree = ff_png_zfree;
    s->zstream.opaque = NULL;
    compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
                      ? Z_DEFAULT_COMPRESSION
                      : av_clip(avctx->compression_level, 0, 9);
    if (deflateInit2(&s->zstream, compression_level, Z_DEFLATED, 15, 8, Z_DEFAULT_STRATEGY) != Z_OK)
        return -1;

    return 0;
}
1097 
1099 {
1100  PNGEncContext *s = avctx->priv_data;
1101 
1102  deflateEnd(&s->zstream);
1106  av_freep(&s->extra_data);
1107  s->extra_data_size = 0;
1108  return 0;
1109 }
1110 
1111 #define OFFSET(x) offsetof(PNGEncContext, x)
1112 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
/* Shared AVOptions for both encoders; "pred" selects the per-row PNG
 * filter strategy — MIXED tries all five filters per row and keeps the
 * cheapest (see png_choose_filter). dpi/dpm set the pHYs pixel density
 * and are mutually exclusive (checked in init). */
static const AVOption options[] = {
    {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
    {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
    { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_NONE }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, "pred" },
    { "none",  NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE },  INT_MIN, INT_MAX, VE, "pred" },
    { "sub",   NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB },   INT_MIN, INT_MAX, VE, "pred" },
    { "up",    NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP },    INT_MIN, INT_MAX, VE, "pred" },
    { "avg",   NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG },   INT_MIN, INT_MAX, VE, "pred" },
    { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, "pred" },
    { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, "pred" },
    { NULL},
};
1125 
/* AVClass wiring the shared options table into the PNG encoder. */
static const AVClass pngenc_class = {
    .class_name = "PNG encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
1132 
/* AVClass wiring the same options table into the APNG encoder. */
static const AVClass apngenc_class = {
    .class_name = "APNG encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
1139 
/* NOTE(review): the opening "AVCodec ff_png_encoder = {" line, a
 * .capabilities entry and the pixel-format list entries were elided by
 * the extraction; confirm against upstream before relying on this table. */
    .name           = "png",
    .long_name      = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_PNG,
    .priv_data_size = sizeof(PNGEncContext),
    .init           = png_enc_init,
    .close          = png_enc_close,
    .encode2        = encode_png,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        /* NOTE(review): format entries elided by the extraction */
    },
    .priv_class     = &pngenc_class,
};
1160 
/* NOTE(review): the opening "AVCodec ff_apng_encoder = {" line and the
 * pixel-format list entries were elided by the extraction; confirm
 * against upstream. AV_CODEC_CAP_DELAY matches the one-frame latency of
 * encode_apng(). */
    .name           = "apng",
    .long_name      = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_APNG,
    .priv_data_size = sizeof(PNGEncContext),
    .init           = png_enc_init,
    .close          = png_enc_close,
    .encode2        = encode_apng,
    .capabilities   = AV_CODEC_CAP_DELAY,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        /* NOTE(review): format entries elided by the extraction */
    },
    .priv_class     = &apngenc_class,
};
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:167
static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type, uint8_t *src, uint8_t *top, int size, int bpp)
Definition: pngenc.c:165
#define PNG_FILTER_VALUE_AVG
Definition: png.h:41
#define NULL
Definition: coverity.c:32
int extra_data_size
Definition: pngenc.c:73
APNGFctlChunk last_frame_fctl
Definition: pngenc.c:77
#define FF_COMPRESSION_DEFAULT
Definition: avcodec.h:1638
This structure describes decoded (raw) audio or video data.
Definition: frame.h:295
BYTE int const BYTE int int row_size
Definition: avisynth_c.h:908
AVOption.
Definition: opt.h:246
ptrdiff_t const GLvoid * data
Definition: opengl_enc.c:100
#define AV_CODEC_FLAG_INTERLACED_DCT
Use interlaced DCT.
Definition: avcodec.h:896
static uint8_t * png_choose_filter(PNGEncContext *s, uint8_t *dst, uint8_t *src, uint8_t *top, int size, int bpp)
Definition: pngenc.c:194
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:182
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:68
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:35
static const AVClass apngenc_class
Definition: pngenc.c:1133
uint8_t * bytestream
Definition: pngenc.c:52
int num
Numerator.
Definition: rational.h:59
int size
Definition: avcodec.h:1478
uint32_t x_offset
Definition: pngenc.c:43
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:191
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel...
Definition: avcodec.h:1944
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:36
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:1775
static int encode_png(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:524
int color_type
Definition: pngenc.c:65
Views are next to each other.
Definition: stereo3d.h:67
static AVPacket pkt
#define PNG_COLOR_TYPE_RGB
Definition: png.h:33
static const AVClass pngenc_class
Definition: pngenc.c:1126
int bit_depth
Definition: pngenc.c:64
#define PNG_COLOR_TYPE_GRAY_ALPHA
Definition: png.h:35
#define src
Definition: vp8dsp.c:254
AVCodec.
Definition: avcodec.h:3481
#define PNG_COLOR_TYPE_PALETTE
Definition: png.h:32
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:467
#define OFFSET(x)
Definition: pngenc.c:1111
#define AV_CODEC_CAP_INTRA_ONLY
Codec is intra only.
Definition: avcodec.h:1067
size_t last_frame_packet_size
Definition: pngenc.c:79
int filter_type
Definition: pngenc.c:56
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:734
#define PNG_FILTER_VALUE_PAETH
Definition: png.h:42
av_cold void ff_llvidencdsp_init(LLVidEncDSPContext *c)
uint32_t y_offset
Definition: pngenc.c:43
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:72
#define AV_WB64(p, v)
Definition: intreadwrite.h:433
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: avcodec.h:1006
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
double avpriv_get_gamma_from_trc(enum AVColorTransferCharacteristic trc)
Determine a suitable &#39;gamma&#39; value to match the supplied AVColorTransferCharacteristic.
Definition: color_utils.c:28
int ff_alloc_packet2(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int64_t min_size)
Check AVPacket size and/or allocate data.
Definition: encode.c:32
int bits_per_pixel
Definition: pngenc.c:66
uint8_t
#define av_cold
Definition: attributes.h:82
#define av_malloc(s)
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:189
#define PNG_COLOR_TYPE_RGB_ALPHA
Definition: png.h:34
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
AVOptions.
Stereo 3D type: this structure describes how two videos are packed within a single video surface...
Definition: stereo3d.h:176
#define f(width, name)
Definition: cbs_vp9.c:255
static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top, int w, int bpp)
Definition: pngenc.c:121
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict, APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
Definition: pngenc.c:712
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is st...
Definition: pixfmt.h:205
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:443
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:388
uint32_t sequence_number
Definition: pngenc.c:41
uint8_t blend_op
Definition: pngenc.c:45
Public header for CRC hash function implementation.
void(* diff_bytes)(uint8_t *dst, const uint8_t *src1, const uint8_t *src2, intptr_t w)
Structure to hold side data for an AVFrame.
Definition: frame.h:201
static void input_data(MLPEncodeContext *ctx, void *samples)
Wrapper function for inputting data in two different bit-depths.
Definition: mlpenc.c:1275
uint8_t * data
Definition: avcodec.h:1477
uint32_t tag
Definition: movenc.c:1496
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:55
ptrdiff_t size
Definition: opengl_enc.c:100
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:2789
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:443
#define AV_WB16(p, v)
Definition: intreadwrite.h:405
#define AV_INPUT_BUFFER_MIN_SIZE
minimum encoding buffer size Used to avoid some checks during header writing.
Definition: avcodec.h:797
#define av_log(a,...)
#define PNG_FILTER_VALUE_MIXED
Definition: png.h:43
int is_progressive
Definition: pngenc.c:63
AVCodec ff_apng_encoder
Definition: pngenc.c:1161
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: avcodec.h:1509
#define U(x)
Definition: vp56_arith.h:37
uint32_t palette_checksum
Definition: pngenc.c:69
also FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:448
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:259
int width
Definition: frame.h:353
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
uint32_t sequence_number
Definition: pngenc.c:70
int flags
Additional information about the frame packing.
Definition: stereo3d.h:185
16 bits gray, 16 bits alpha (big-endian)
Definition: pixfmt.h:212
static void png_get_interlaced_row(uint8_t *dst, int row_size, int bits_per_pixel, int pass, const uint8_t *src, int width)
Definition: pngenc.c:82
uint32_t width
Definition: pngenc.c:42
static const uint16_t mask[17]
Definition: lzw.c:38
z_stream zstream
Definition: pngenc.c:58
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:202
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:186
int dpm
Physical pixel density, in dots per meter, if set.
Definition: pngenc.c:61
int ff_png_get_nb_channels(int color_type)
Definition: png.c:49
#define AV_WB32_PNG(buf, n)
Definition: pngenc.c:292
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:1645
#define PNGSIG
Definition: png.h:47
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP177 Annex B
Definition: pixfmt.h:445
simple assert() macros that are a bit more flexible than ISO C assert().
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:345
GLsizei GLsizei * length
Definition: opengl_enc.c:114
const char * name
Name of the codec implementation.
Definition: avcodec.h:3488
#define NB_PASSES
Definition: png.h:45
#define fail()
Definition: checkasm.h:120
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: avcodec.h:1037
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:792
int flags
A combination of AV_PKT_FLAG values.
Definition: avcodec.h:1483
#define pass
Definition: fft_template.c:619
uint8_t * bytestream_start
Definition: pngenc.c:53
#define b
Definition: input.c:41
enum AVPictureType pict_type
Picture type of the frame.
Definition: frame.h:378
static int output_data(MLPDecodeContext *m, unsigned int substr, AVFrame *frame, int *got_frame_ptr)
Write the audio data into the output buffer.
Definition: mlpdec.c:1062
alias for AV_PIX_FMT_YA8
Definition: pixfmt.h:146
#define FFMIN(a, b)
Definition: common.h:96
#define PNG_FILTER_VALUE_SUB
Definition: png.h:39
static void png_write_chunk(uint8_t **f, uint32_t tag, const uint8_t *buf, int length)
Definition: pngenc.c:224
#define PNG_COLOR_TYPE_GRAY
Definition: png.h:31
int width
picture width / height.
Definition: avcodec.h:1738
uint8_t w
Definition: llviddspenc.c:38
uint16_t delay_num
Definition: pngenc.c:44
static const AVOption options[]
Definition: pngenc.c:1113
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:450
uint32_t height
Definition: pngenc.c:42
uint32_t av_crc(const AVCRC *ctx, uint32_t crc, const uint8_t *buffer, size_t length)
Calculate the CRC of a block.
Definition: crc.c:392
#define s(width, name)
Definition: cbs_vp9.c:257
uint8_t * last_frame_packet
Definition: pngenc.c:78
static volatile int checksum
Definition: adler32.c:30
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:841
static const float pred[4]
Definition: siprdata.h:259
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:368
static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
Definition: pngenc.c:335
The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was...
Definition: avcodec.h:1199
#define IOBUF_SIZE
Definition: pngenc.c:38
#define src1
Definition: h264pred.c:139
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:180
#define av_bswap32
Definition: bswap.h:33
AVCodec ff_png_encoder
Definition: pngenc.c:1140
Libavcodec external API header.
functionally identical to above
Definition: pixfmt.h:452
enum AVCodecID codec_id
Definition: avcodec.h:1575
int compression_level
Definition: avcodec.h:1637
attribute_deprecated int prediction_method
Definition: avcodec.h:1924
#define PNG_FILTER_VALUE_UP
Definition: png.h:40
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:326
#define abs(x)
Definition: cuda_runtime.h:35
static void png_write_image_data(AVCodecContext *avctx, const uint8_t *buf, int length)
Definition: pngenc.c:243
static const int16_t alpha[]
Definition: ilbcdata.h:55
main external API structure.
Definition: avcodec.h:1565
uint8_t * data
Definition: frame.h:203
void * buf
Definition: avisynth_c.h:766
const uint8_t ff_png_pass_ymask[NB_PASSES]
Definition: png.c:25
Replacements for frequently missing libm functions.
Describe the class of an AVClass context structure.
Definition: log.h:67
#define AV_WB32(p, v)
Definition: intreadwrite.h:419
Y , 16bpp, big-endian.
Definition: pixfmt.h:97
int dpi
Physical pixel density, in dots per inch, if set.
Definition: pngenc.c:60
#define VE
Definition: pngenc.c:1112
static av_cold int png_enc_init(AVCodecContext *avctx)
Definition: pngenc.c:989
and forward the test the status of outputs and forward it to the corresponding return FFERROR_NOT_READY If the filters stores internally one or a few frame for some input
uint8_t * bytestream_end
Definition: pngenc.c:54
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:431
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:275
uint8_t dispose_op
Definition: pngenc.c:45
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:324
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:553
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:309
const AVCRC * av_crc_get_table(AVCRCId crc_id)
Get an initialized standard CRC table.
Definition: crc.c:374
uint8_t buf[IOBUF_SIZE]
Definition: pngenc.c:59
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb...
Definition: pixfmt.h:76
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
Definition: pngenc.c:271
int extra_data_updated
Definition: pngenc.c:71
Y , 8bpp.
Definition: pixfmt.h:74
IEC 61966-2-1 (sRGB or sYCC)
Definition: pixfmt.h:481
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:84
common internal api header.
#define PNG_FILTER_VALUE_NONE
Definition: png.h:38
packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:102
Stereoscopic video.
attribute_deprecated AVFrame * coded_frame
the picture in the bitstream
Definition: avcodec.h:2815
int den
Denominator.
Definition: rational.h:60
void ff_png_zfree(void *opaque, void *ptr)
Definition: png.c:44
static void deflate(uint8_t *dst, const uint8_t *p1, int width, int threshold, const uint8_t *coordinates[], int coord, int maxc)
Definition: vf_neighbor.c:164
#define MKBETAG(a, b, c, d)
Definition: common.h:367
void * priv_data
Definition: avcodec.h:1592
uint8_t * extra_data
Definition: pngenc.c:72
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:85
int len
int key_frame
1 -> keyframe, 0-> not
Definition: frame.h:373
APNG common header.
static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
Definition: pngenc.c:293
enum AVColorPrimaries color_primaries
Definition: frame.h:541
int ff_png_pass_row_size(int pass, int bits_per_pixel, int width)
Definition: png.c:62
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed...
Definition: avcodec.h:1476
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:451
int frame_number
Frame counter, set by libavcodec.
Definition: avcodec.h:2256
ITU-R BT2020.
Definition: pixfmt.h:454
int height
Definition: frame.h:353
#define av_freep(p)
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:543
uint16_t delay_den
Definition: pngenc.c:44
static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input, APNGFctlChunk *fctl_chunk, uint8_t bpp)
Definition: pngenc.c:569
AVFrame * last_frame
Definition: pngenc.c:76
static av_cold int png_enc_close(AVCodecContext *avctx)
Definition: pngenc.c:1098
#define FFSWAP(type, a, b)
Definition: common.h:99
uint8_t * av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type, int size)
Allocate new information of a packet.
Definition: avpacket.c:329
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
#define MKTAG(a, b, c, d)
Definition: common.h:366
void * ff_png_zalloc(void *opaque, unsigned int items, unsigned int size)
Definition: png.c:39
Stereoscopic 3d metadata.
Definition: frame.h:63
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
This structure stores compressed data.
Definition: avcodec.h:1454
uint32_t AVCRC
Definition: crc.h:47
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: avcodec.h:1470
for(j=16;j >0;--j)
AVFrame * prev_frame
Definition: pngenc.c:75
LLVidEncDSPContext llvidencdsp
Definition: pngenc.c:50
#define AV_WL32(p, v)
Definition: intreadwrite.h:426
static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
Definition: pngenc.c:149