pngdec.c
1 /*
2  * PNG image format
3  * Copyright (c) 2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 //#define DEBUG
23 
24 #include "libavutil/avassert.h"
25 #include "libavutil/bprint.h"
26 #include "libavutil/crc.h"
27 #include "libavutil/imgutils.h"
28 #include "libavutil/intreadwrite.h"
29 #include "libavutil/stereo3d.h"
30 #include "libavutil/mastering_display_metadata.h"
31 
32 #include "avcodec.h"
33 #include "bytestream.h"
34 #include "internal.h"
35 #include "apng.h"
36 #include "png.h"
37 #include "pngdsp.h"
38 #include "thread.h"
39 
40 #include <zlib.h>
41 
42 enum PNGHeaderState {
43     PNG_IHDR = 1 << 0,
44  PNG_PLTE = 1 << 1,
45 };
46 
47 enum PNGImageState {
48     PNG_IDAT = 1 << 0,
49  PNG_ALLIMAGE = 1 << 1,
50 };
51 
52 typedef struct PNGDecContext {
53     PNGDSPContext dsp;
54     AVCodecContext *avctx;
55 
56     GetByteContext gb;
57     ThreadFrame last_picture;
58     ThreadFrame picture;
59 
60     enum PNGHeaderState hdr_state;
61     enum PNGImageState pic_state;
62     int width, height;
63     int cur_w, cur_h;
64     int last_w, last_h;
65     int x_offset, y_offset;
66     int last_x_offset, last_y_offset;
67     uint8_t dispose_op, blend_op;
68     uint8_t last_dispose_op;
69     int bit_depth;
70     int color_type;
71     int compression_type;
72     int interlace_type;
73     int filter_type;
74     int channels;
75     int bits_per_pixel;
76     int bpp;
77     int has_trns;
78     uint8_t transparent_color_be[6];
79 
80     uint8_t *image_buf;
81     int image_linesize;
82     uint32_t palette[256];
83     uint8_t *crow_buf;
84     uint8_t *last_row;
85     unsigned int last_row_size;
86     uint8_t *tmp_row;
87     unsigned int tmp_row_size;
88     uint8_t *buffer;
89     int buffer_size;
90     int pass;
91     int crow_size; /* compressed row size (include filter type) */
92     int row_size;  /* decompressed row size */
93     int pass_row_size; /* decompress row size of the current pass */
94     int y;
95     z_stream zstream;
96 } PNGDecContext;
97 
98 /* Mask to determine which pixels are valid in a pass */
99 static const uint8_t png_pass_mask[NB_PASSES] = {
100  0x01, 0x01, 0x11, 0x11, 0x55, 0x55, 0xff,
101 };
102 
103 /* Mask to determine which y pixels can be written in a pass */
104 static const uint8_t png_pass_dsp_ymask[NB_PASSES] = {
105     0xff, 0xff, 0x0f, 0xff, 0x33, 0xff, 0x55,
106 };
107 
108 /* Mask to determine which pixels to overwrite while displaying */
109 static const uint8_t png_pass_dsp_mask[NB_PASSES] = {
110     0xff, 0x0f, 0xff, 0x33, 0xff, 0x55, 0xff
111 };
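/* The three mask tables above drive Adam7 deinterlacing: each entry is an
 * 8-bit pattern describing one period of the 8x8 interlace grid, and a
 * pixel (or row) at position p takes part in a pass when
 * (mask << (p & 7)) & 0x80 is non-zero. png_pass_mask selects which source
 * pixels exist in a pass, while the *dsp* masks select which destination
 * pixels are (over)written so that a coarse preview is available after
 * every pass. */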
112 
113 /* NOTE: we try to construct a good looking image at each pass. width
114  * is the original image width. We also do pixel format conversion at
115  * this stage */
116 static void png_put_interlaced_row(uint8_t *dst, int width,
117  int bits_per_pixel, int pass,
118  int color_type, const uint8_t *src)
119 {
120  int x, mask, dsp_mask, j, src_x, b, bpp;
121  uint8_t *d;
122  const uint8_t *s;
123 
124  mask = png_pass_mask[pass];
125  dsp_mask = png_pass_dsp_mask[pass];
126 
127  switch (bits_per_pixel) {
128  case 1:
129  src_x = 0;
130  for (x = 0; x < width; x++) {
131  j = (x & 7);
132  if ((dsp_mask << j) & 0x80) {
133  b = (src[src_x >> 3] >> (7 - (src_x & 7))) & 1;
134  dst[x >> 3] &= 0xFF7F>>j;
135  dst[x >> 3] |= b << (7 - j);
136  }
137  if ((mask << j) & 0x80)
138  src_x++;
139  }
140  break;
141  case 2:
142  src_x = 0;
143  for (x = 0; x < width; x++) {
144  int j2 = 2 * (x & 3);
145  j = (x & 7);
146  if ((dsp_mask << j) & 0x80) {
147  b = (src[src_x >> 2] >> (6 - 2*(src_x & 3))) & 3;
148  dst[x >> 2] &= 0xFF3F>>j2;
149  dst[x >> 2] |= b << (6 - j2);
150  }
151  if ((mask << j) & 0x80)
152  src_x++;
153  }
154  break;
155  case 4:
156  src_x = 0;
157  for (x = 0; x < width; x++) {
158  int j2 = 4*(x&1);
159  j = (x & 7);
160  if ((dsp_mask << j) & 0x80) {
161  b = (src[src_x >> 1] >> (4 - 4*(src_x & 1))) & 15;
162  dst[x >> 1] &= 0xFF0F>>j2;
163  dst[x >> 1] |= b << (4 - j2);
164  }
165  if ((mask << j) & 0x80)
166  src_x++;
167  }
168  break;
169  default:
170  bpp = bits_per_pixel >> 3;
171  d = dst;
172  s = src;
173  for (x = 0; x < width; x++) {
174  j = x & 7;
175  if ((dsp_mask << j) & 0x80) {
176  memcpy(d, s, bpp);
177  }
178  d += bpp;
179  if ((mask << j) & 0x80)
180  s += bpp;
181  }
182  break;
183  }
184 }
185 
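/* Paeth prediction as defined by the PNG specification: each byte is
 * predicted from its left (a), upper (b) and upper-left (c) neighbours by
 * picking whichever is closest to p = a + b - c. The loop below evaluates
 * the three distances |p - a| = |b - c|, |p - b| = |a - c| and
 * |p - c| = |a + b - 2c| without ever forming p itself. */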
186 void ff_add_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
187                                  int w, int bpp)
188 {
189  int i;
190  for (i = 0; i < w; i++) {
191  int a, b, c, p, pa, pb, pc;
192 
193  a = dst[i - bpp];
194  b = top[i];
195  c = top[i - bpp];
196 
197  p = b - c;
198  pc = a - c;
199 
200  pa = abs(p);
201  pb = abs(pc);
202  pc = abs(p + pc);
203 
204  if (pa <= pb && pa <= pc)
205  p = a;
206  else if (pb <= pc)
207  p = b;
208  else
209  p = c;
210  dst[i] = p + src[i];
211  }
212 }
213 
214 #define UNROLL1(bpp, op) \
215  { \
216  r = dst[0]; \
217  if (bpp >= 2) \
218  g = dst[1]; \
219  if (bpp >= 3) \
220  b = dst[2]; \
221  if (bpp >= 4) \
222  a = dst[3]; \
223  for (; i <= size - bpp; i += bpp) { \
224  dst[i + 0] = r = op(r, src[i + 0], last[i + 0]); \
225  if (bpp == 1) \
226  continue; \
227  dst[i + 1] = g = op(g, src[i + 1], last[i + 1]); \
228  if (bpp == 2) \
229  continue; \
230  dst[i + 2] = b = op(b, src[i + 2], last[i + 2]); \
231  if (bpp == 3) \
232  continue; \
233  dst[i + 3] = a = op(a, src[i + 3], last[i + 3]); \
234  } \
235  }
236 
237 #define UNROLL_FILTER(op) \
238  if (bpp == 1) { \
239  UNROLL1(1, op) \
240  } else if (bpp == 2) { \
241  UNROLL1(2, op) \
242  } else if (bpp == 3) { \
243  UNROLL1(3, op) \
244  } else if (bpp == 4) { \
245  UNROLL1(4, op) \
246  } \
247  for (; i < size; i++) { \
248  dst[i] = op(dst[i - bpp], src[i], last[i]); \
249  }
250 
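/* UNROLL1()/UNROLL_FILTER() unroll the per-byte filters for bpp 1..4 so the
 * previously filtered value of each channel stays in a local (r, g, b, a)
 * instead of being re-read from dst[]; the plain loop at the end of
 * UNROLL_FILTER handles any remaining bytes as well as larger bpp values
 * (e.g. 16-bit formats). */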
251 /* NOTE: 'dst' can be equal to 'last' */
252 void ff_png_filter_row(PNGDSPContext *dsp, uint8_t *dst, int filter_type,
253                        uint8_t *src, uint8_t *last, int size, int bpp)
254 {
255  int i, p, r, g, b, a;
256 
257  switch (filter_type) {
258     case PNG_FILTER_VALUE_NONE:
259         memcpy(dst, src, size);
260  break;
261     case PNG_FILTER_VALUE_SUB:
262         for (i = 0; i < bpp; i++)
263  dst[i] = src[i];
264  if (bpp == 4) {
265  p = *(int *)dst;
266  for (; i < size; i += bpp) {
267  unsigned s = *(int *)(src + i);
268  p = ((s & 0x7f7f7f7f) + (p & 0x7f7f7f7f)) ^ ((s ^ p) & 0x80808080);
269  *(int *)(dst + i) = p;
270  }
271  } else {
272 #define OP_SUB(x, s, l) ((x) + (s))
273             UNROLL_FILTER(OP_SUB);
274         }
275  break;
276  case PNG_FILTER_VALUE_UP:
277  dsp->add_bytes_l2(dst, src, last, size);
278  break;
279     case PNG_FILTER_VALUE_AVG:
280         for (i = 0; i < bpp; i++) {
281  p = (last[i] >> 1);
282  dst[i] = p + src[i];
283  }
284 #define OP_AVG(x, s, l) (((((x) + (l)) >> 1) + (s)) & 0xff)
285         UNROLL_FILTER(OP_AVG);
286         break;
287     case PNG_FILTER_VALUE_PAETH:
288         for (i = 0; i < bpp; i++) {
289  p = last[i];
290  dst[i] = p + src[i];
291  }
292  if (bpp > 2 && size > 4) {
293  /* would write off the end of the array if we let it process
294  * the last pixel with bpp=3 */
295  int w = (bpp & 3) ? size - 3 : size;
296 
297  if (w > i) {
298  dsp->add_paeth_prediction(dst + i, src + i, last + i, size - i, bpp);
299  i = w;
300  }
301  }
302  ff_add_png_paeth_prediction(dst + i, src + i, last + i, size - i, bpp);
303  break;
304  }
305 }
306 
307 /* This used to be called "deloco" in FFmpeg
308  * and is actually an inverse reversible colorspace transformation */
309 #define YUV2RGB(NAME, TYPE) \
310 static void deloco_ ## NAME(TYPE *dst, int size, int alpha) \
311 { \
312  int i; \
313  for (i = 0; i < size; i += 3 + alpha) { \
314  int g = dst [i + 1]; \
315  dst[i + 0] += g; \
316  dst[i + 2] += g; \
317  } \
318 }
319 
320 YUV2RGB(rgb8, uint8_t)
321 YUV2RGB(rgb16, uint16_t)
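/* PNG_FILTER_TYPE_LOCO is MNG's intrapixel differencing: red and blue are
 * stored as differences against green (a reversible lossless colour
 * transform). deloco_rgb8()/deloco_rgb16() undo it by adding the green
 * sample back to the red and blue samples of each pixel, leaving any alpha
 * channel untouched. */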
322 
323 static int percent_missing(PNGDecContext *s)
324 {
325  if (s->interlace_type) {
326  return 100 - 100 * s->pass / (NB_PASSES - 1);
327  } else {
328  return 100 - 100 * s->y / s->cur_h;
329  }
330 }
331 
332 /* process exactly one decompressed row */
333 static void png_handle_row(PNGDecContext *s)
334 {
335  uint8_t *ptr, *last_row;
336  int got_line;
337 
338  if (!s->interlace_type) {
339  ptr = s->image_buf + s->image_linesize * (s->y + s->y_offset) + s->x_offset * s->bpp;
340  if (s->y == 0)
341  last_row = s->last_row;
342  else
343  last_row = ptr - s->image_linesize;
344 
345  ff_png_filter_row(&s->dsp, ptr, s->crow_buf[0], s->crow_buf + 1,
346  last_row, s->row_size, s->bpp);
347  /* loco lags by 1 row so that it doesn't interfere with top prediction */
348  if (s->filter_type == PNG_FILTER_TYPE_LOCO && s->y > 0) {
349  if (s->bit_depth == 16) {
350  deloco_rgb16((uint16_t *)(ptr - s->image_linesize), s->row_size / 2,
351                              s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
352             } else {
353  deloco_rgb8(ptr - s->image_linesize, s->row_size,
354                             s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
355             }
356  }
357  s->y++;
358  if (s->y == s->cur_h) {
359  s->pic_state |= PNG_ALLIMAGE;
360  if (s->filter_type == PNG_FILTER_TYPE_LOCO) {
361  if (s->bit_depth == 16) {
362  deloco_rgb16((uint16_t *)ptr, s->row_size / 2,
363                                  s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
364                 } else {
365  deloco_rgb8(ptr, s->row_size,
366                                 s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
367                 }
368  }
369  }
370  } else {
371  got_line = 0;
372  for (;;) {
373  ptr = s->image_buf + s->image_linesize * (s->y + s->y_offset) + s->x_offset * s->bpp;
374  if ((ff_png_pass_ymask[s->pass] << (s->y & 7)) & 0x80) {
375  /* if we already read one row, it is time to stop to
376  * wait for the next one */
377  if (got_line)
378  break;
379  ff_png_filter_row(&s->dsp, s->tmp_row, s->crow_buf[0], s->crow_buf + 1,
380  s->last_row, s->pass_row_size, s->bpp);
381  FFSWAP(uint8_t *, s->last_row, s->tmp_row);
382  FFSWAP(unsigned int, s->last_row_size, s->tmp_row_size);
383  got_line = 1;
384  }
385  if ((png_pass_dsp_ymask[s->pass] << (s->y & 7)) & 0x80) {
386                 png_put_interlaced_row(ptr, s->cur_w, s->bits_per_pixel, s->pass,
387                                        s->color_type, s->last_row);
388  }
389  s->y++;
390  if (s->y == s->cur_h) {
391  memset(s->last_row, 0, s->row_size);
392  for (;;) {
393  if (s->pass == NB_PASSES - 1) {
394  s->pic_state |= PNG_ALLIMAGE;
395  goto the_end;
396  } else {
397  s->pass++;
398  s->y = 0;
399                         s->pass_row_size = ff_png_pass_row_size(s->pass,
400                                                                 s->bits_per_pixel,
401  s->cur_w);
402  s->crow_size = s->pass_row_size + 1;
403  if (s->pass_row_size != 0)
404  break;
405  /* skip pass if empty row */
406  }
407  }
408  }
409  }
410 the_end:;
411  }
412 }
413 
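/* Feed one IDAT/fdAT payload of `length` bytes to the shared zlib stream.
 * Each time inflate() fills crow_buf (one filter-type byte followed by one
 * (pass-)row of data, crow_size bytes in total), png_handle_row() filters
 * the row into the output frame and the output buffer is rewound for the
 * next row. */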
414 static int png_decode_idat(PNGDecContext *s, int length)
415 {
416  int ret;
417  s->zstream.avail_in = FFMIN(length, bytestream2_get_bytes_left(&s->gb));
418  s->zstream.next_in = s->gb.buffer;
419  bytestream2_skip(&s->gb, length);
420 
421  /* decode one line if possible */
422  while (s->zstream.avail_in > 0) {
423  ret = inflate(&s->zstream, Z_PARTIAL_FLUSH);
424  if (ret != Z_OK && ret != Z_STREAM_END) {
425  av_log(s->avctx, AV_LOG_ERROR, "inflate returned error %d\n", ret);
426  return AVERROR_EXTERNAL;
427  }
428  if (s->zstream.avail_out == 0) {
429  if (!(s->pic_state & PNG_ALLIMAGE)) {
430  png_handle_row(s);
431  }
432  s->zstream.avail_out = s->crow_size;
433  s->zstream.next_out = s->crow_buf;
434  }
435  if (ret == Z_STREAM_END && s->zstream.avail_in > 0) {
436             av_log(s->avctx, AV_LOG_WARNING,
437                    "%d undecompressed bytes left in buffer\n", s->zstream.avail_in);
438  return 0;
439  }
440  }
441  return 0;
442 }
443 
444 static int decode_zbuf(AVBPrint *bp, const uint8_t *data,
445  const uint8_t *data_end)
446 {
447  z_stream zstream;
448  unsigned char *buf;
449  unsigned buf_size;
450  int ret;
451 
452  zstream.zalloc = ff_png_zalloc;
453  zstream.zfree = ff_png_zfree;
454  zstream.opaque = NULL;
455  if (inflateInit(&zstream) != Z_OK)
456  return AVERROR_EXTERNAL;
457  zstream.next_in = data;
458  zstream.avail_in = data_end - data;
459     av_bprint_init(bp, 0, AV_BPRINT_SIZE_UNLIMITED);
460 
461  while (zstream.avail_in > 0) {
462  av_bprint_get_buffer(bp, 2, &buf, &buf_size);
463  if (buf_size < 2) {
464  ret = AVERROR(ENOMEM);
465  goto fail;
466  }
467  zstream.next_out = buf;
468  zstream.avail_out = buf_size - 1;
469  ret = inflate(&zstream, Z_PARTIAL_FLUSH);
470  if (ret != Z_OK && ret != Z_STREAM_END) {
471  ret = AVERROR_EXTERNAL;
472  goto fail;
473  }
474  bp->len += zstream.next_out - buf;
475  if (ret == Z_STREAM_END)
476  break;
477  }
478  inflateEnd(&zstream);
479  bp->str[bp->len] = 0;
480  return 0;
481 
482 fail:
483  inflateEnd(&zstream);
484     av_bprint_finalize(bp, NULL);
485     return ret;
486 }
487 
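/* tEXt/zTXt payloads are Latin-1 (ISO 8859-1) while AVDictionary strings are
 * expected to be UTF-8. Bytes 0x00-0x7F are identical in both encodings;
 * bytes 0x80-0xFF expand to the two bytes 0xC0|(c>>6), 0x80|(c&0x3F), which
 * is what the `extra` count below accounts for. */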
488 static uint8_t *iso88591_to_utf8(const uint8_t *in, size_t size_in)
489 {
490  size_t extra = 0, i;
491  uint8_t *out, *q;
492 
493  for (i = 0; i < size_in; i++)
494  extra += in[i] >= 0x80;
495  if (size_in == SIZE_MAX || extra > SIZE_MAX - size_in - 1)
496  return NULL;
497  q = out = av_malloc(size_in + extra + 1);
498  if (!out)
499  return NULL;
500  for (i = 0; i < size_in; i++) {
501  if (in[i] >= 0x80) {
502  *(q++) = 0xC0 | (in[i] >> 6);
503  *(q++) = 0x80 | (in[i] & 0x3F);
504  } else {
505  *(q++) = in[i];
506  }
507  }
508  *(q++) = 0;
509  return out;
510 }
511 
512 static int decode_text_chunk(PNGDecContext *s, uint32_t length, int compressed,
513  AVDictionary **dict)
514 {
515  int ret, method;
516  const uint8_t *data = s->gb.buffer;
517  const uint8_t *data_end = data + length;
518  const uint8_t *keyword = data;
519  const uint8_t *keyword_end = memchr(keyword, 0, data_end - keyword);
520  uint8_t *kw_utf8 = NULL, *text, *txt_utf8 = NULL;
521  unsigned text_len;
522  AVBPrint bp;
523 
524  if (!keyword_end)
525  return AVERROR_INVALIDDATA;
526  data = keyword_end + 1;
527 
528  if (compressed) {
529  if (data == data_end)
530  return AVERROR_INVALIDDATA;
531  method = *(data++);
532  if (method)
533  return AVERROR_INVALIDDATA;
534  if ((ret = decode_zbuf(&bp, data, data_end)) < 0)
535  return ret;
536  text_len = bp.len;
537  ret = av_bprint_finalize(&bp, (char **)&text);
538  if (ret < 0)
539  return ret;
540  } else {
541  text = (uint8_t *)data;
542  text_len = data_end - text;
543  }
544 
545  kw_utf8 = iso88591_to_utf8(keyword, keyword_end - keyword);
546  txt_utf8 = iso88591_to_utf8(text, text_len);
547  if (text != data)
548  av_free(text);
549  if (!(kw_utf8 && txt_utf8)) {
550  av_free(kw_utf8);
551  av_free(txt_utf8);
552  return AVERROR(ENOMEM);
553  }
554 
555  av_dict_set(dict, kw_utf8, txt_utf8,
556                 AV_DICT_DONT_STRDUP_KEY | AV_DICT_DONT_STRDUP_VAL);
557     return 0;
558 }
559 
560 static int decode_ihdr_chunk(AVCodecContext *avctx, PNGDecContext *s,
561                              uint32_t length)
562 {
563  if (length != 13)
564  return AVERROR_INVALIDDATA;
565 
566  if (s->pic_state & PNG_IDAT) {
567  av_log(avctx, AV_LOG_ERROR, "IHDR after IDAT\n");
568  return AVERROR_INVALIDDATA;
569  }
570 
571  if (s->hdr_state & PNG_IHDR) {
572  av_log(avctx, AV_LOG_ERROR, "Multiple IHDR\n");
573  return AVERROR_INVALIDDATA;
574  }
575 
576  s->width = s->cur_w = bytestream2_get_be32(&s->gb);
577  s->height = s->cur_h = bytestream2_get_be32(&s->gb);
578  if (av_image_check_size(s->width, s->height, 0, avctx)) {
579  s->cur_w = s->cur_h = s->width = s->height = 0;
580  av_log(avctx, AV_LOG_ERROR, "Invalid image size\n");
581  return AVERROR_INVALIDDATA;
582  }
583  s->bit_depth = bytestream2_get_byte(&s->gb);
584  if (s->bit_depth != 1 && s->bit_depth != 2 && s->bit_depth != 4 &&
585  s->bit_depth != 8 && s->bit_depth != 16) {
586  av_log(avctx, AV_LOG_ERROR, "Invalid bit depth\n");
587  goto error;
588  }
589  s->color_type = bytestream2_get_byte(&s->gb);
590  s->compression_type = bytestream2_get_byte(&s->gb);
591  if (s->compression_type) {
592  av_log(avctx, AV_LOG_ERROR, "Invalid compression method %d\n", s->compression_type);
593  goto error;
594  }
595  s->filter_type = bytestream2_get_byte(&s->gb);
596  s->interlace_type = bytestream2_get_byte(&s->gb);
597  bytestream2_skip(&s->gb, 4); /* crc */
598  s->hdr_state |= PNG_IHDR;
599  if (avctx->debug & FF_DEBUG_PICT_INFO)
600  av_log(avctx, AV_LOG_DEBUG, "width=%d height=%d depth=%d color_type=%d "
601  "compression_type=%d filter_type=%d interlace_type=%d\n",
602  s->width, s->height, s->bit_depth, s->color_type,
603                 s->compression_type, s->filter_type, s->interlace_type);
604 
605  return 0;
606 error:
607  s->cur_w = s->cur_h = s->width = s->height = 0;
608  s->bit_depth = 8;
609  return AVERROR_INVALIDDATA;
610 }
611 
612 static int decode_phys_chunk(AVCodecContext *avctx, PNGDecContext *s)
613 {
614  if (s->pic_state & PNG_IDAT) {
615  av_log(avctx, AV_LOG_ERROR, "pHYs after IDAT\n");
616  return AVERROR_INVALIDDATA;
617  }
618  avctx->sample_aspect_ratio.num = bytestream2_get_be32(&s->gb);
619  avctx->sample_aspect_ratio.den = bytestream2_get_be32(&s->gb);
620  if (avctx->sample_aspect_ratio.num < 0 || avctx->sample_aspect_ratio.den < 0)
621  avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
622  bytestream2_skip(&s->gb, 1); /* unit specifier */
623  bytestream2_skip(&s->gb, 4); /* crc */
624 
625  return 0;
626 }
627 
628 static int decode_idat_chunk(AVCodecContext *avctx, PNGDecContext *s,
629                              uint32_t length, AVFrame *p)
630 {
631  int ret;
632  size_t byte_depth = s->bit_depth > 8 ? 2 : 1;
633 
634  if (!(s->hdr_state & PNG_IHDR)) {
635  av_log(avctx, AV_LOG_ERROR, "IDAT without IHDR\n");
636  return AVERROR_INVALIDDATA;
637  }
638  if (!(s->pic_state & PNG_IDAT)) {
639  /* init image info */
640  ret = ff_set_dimensions(avctx, s->width, s->height);
641  if (ret < 0)
642  return ret;
643 
644         s->channels       = ff_png_get_nb_channels(s->color_type);
645         s->bits_per_pixel = s->bit_depth * s->channels;
646  s->bpp = (s->bits_per_pixel + 7) >> 3;
647  s->row_size = (s->cur_w * s->bits_per_pixel + 7) >> 3;
648 
649         if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
650             s->color_type == PNG_COLOR_TYPE_RGB) {
651             avctx->pix_fmt = AV_PIX_FMT_RGB24;
652         } else if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
653                    s->color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
654             avctx->pix_fmt = AV_PIX_FMT_RGBA;
655         } else if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
656                    s->color_type == PNG_COLOR_TYPE_GRAY) {
657             avctx->pix_fmt = AV_PIX_FMT_GRAY8;
658         } else if (s->bit_depth == 16 &&
659                    s->color_type == PNG_COLOR_TYPE_GRAY) {
660             avctx->pix_fmt = AV_PIX_FMT_GRAY16BE;
661         } else if (s->bit_depth == 16 &&
662                    s->color_type == PNG_COLOR_TYPE_RGB) {
663             avctx->pix_fmt = AV_PIX_FMT_RGB48BE;
664         } else if (s->bit_depth == 16 &&
665                    s->color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
666             avctx->pix_fmt = AV_PIX_FMT_RGBA64BE;
667         } else if ((s->bits_per_pixel == 1 || s->bits_per_pixel == 2 || s->bits_per_pixel == 4 || s->bits_per_pixel == 8) &&
668                    s->color_type == PNG_COLOR_TYPE_PALETTE) {
669             avctx->pix_fmt = AV_PIX_FMT_PAL8;
670         } else if (s->bit_depth == 1 && s->bits_per_pixel == 1 && avctx->codec_id != AV_CODEC_ID_APNG) {
671             avctx->pix_fmt = AV_PIX_FMT_MONOBLACK;
672         } else if (s->bit_depth == 8 &&
673                    s->color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
674             avctx->pix_fmt = AV_PIX_FMT_YA8;
675         } else if (s->bit_depth == 16 &&
676                    s->color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
677             avctx->pix_fmt = AV_PIX_FMT_YA16BE;
678         } else {
679             avpriv_report_missing_feature(avctx,
680                                           "Bit depth %d color type %d",
681                                           s->bit_depth, s->color_type);
682             return AVERROR_PATCHWELCOME;
683         }
684 
685  if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE) {
686  switch (avctx->pix_fmt) {
687  case AV_PIX_FMT_RGB24:
688  avctx->pix_fmt = AV_PIX_FMT_RGBA;
689  break;
690 
691  case AV_PIX_FMT_RGB48BE:
692  avctx->pix_fmt = AV_PIX_FMT_RGBA64BE;
693  break;
694 
695  case AV_PIX_FMT_GRAY8:
696  avctx->pix_fmt = AV_PIX_FMT_YA8;
697  break;
698 
699  case AV_PIX_FMT_GRAY16BE:
700  avctx->pix_fmt = AV_PIX_FMT_YA16BE;
701  break;
702 
703  default:
704  avpriv_request_sample(avctx, "bit depth %d "
705  "and color type %d with TRNS",
706  s->bit_depth, s->color_type);
707  return AVERROR_INVALIDDATA;
708  }
709 
710  s->bpp += byte_depth;
711  }
712 
713  ff_thread_release_buffer(avctx, &s->picture);
714  if ((ret = ff_thread_get_buffer(avctx, &s->picture, AV_GET_BUFFER_FLAG_REF)) < 0)
715  return ret;
716 
717         p->pict_type        = AV_PICTURE_TYPE_I;
718         p->key_frame        = 1;
719         p->interlaced_frame = !!s->interlace_type;
720 
721  ff_thread_finish_setup(avctx);
722 
723  /* compute the compressed row size */
724  if (!s->interlace_type) {
725  s->crow_size = s->row_size + 1;
726  } else {
727  s->pass = 0;
728             s->pass_row_size = ff_png_pass_row_size(s->pass,
729                                                     s->bits_per_pixel,
730  s->cur_w);
731  s->crow_size = s->pass_row_size + 1;
732  }
733  ff_dlog(avctx, "row_size=%d crow_size =%d\n",
734  s->row_size, s->crow_size);
735  s->image_buf = p->data[0];
736  s->image_linesize = p->linesize[0];
737  /* copy the palette if needed */
738  if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
739  memcpy(p->data[1], s->palette, 256 * sizeof(uint32_t));
740  /* empty row is used if differencing to the first row */
741         s->last_row = av_mallocz_array(s->row_size, sizeof(*s->last_row));
742         if (!s->last_row)
743  return AVERROR_INVALIDDATA;
744  if (s->interlace_type ||
745             s->color_type == PNG_COLOR_TYPE_PALETTE) {
746             s->tmp_row = av_mallocz_array(s->row_size, sizeof(*s->tmp_row));
747             if (!s->tmp_row)
748  return AVERROR_INVALIDDATA;
749  }
750  /* compressed row */
751         av_fast_padded_malloc(&s->buffer, &s->buffer_size, s->row_size + 16);
752         if (!s->buffer)
753  return AVERROR(ENOMEM);
754 
755  /* we want crow_buf+1 to be 16-byte aligned */
756  s->crow_buf = s->buffer + 15;
757  s->zstream.avail_out = s->crow_size;
758  s->zstream.next_out = s->crow_buf;
759  }
760 
761  s->pic_state |= PNG_IDAT;
762 
763  /* set image to non-transparent bpp while decompressing */
764     if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE)
765         s->bpp -= byte_depth;
766 
767  ret = png_decode_idat(s, length);
768 
769     if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE)
770         s->bpp += byte_depth;
771 
772  if (ret < 0)
773  return ret;
774 
775  bytestream2_skip(&s->gb, 4); /* crc */
776 
777  return 0;
778 }
779 
780 static int decode_plte_chunk(AVCodecContext *avctx, PNGDecContext *s,
781                              uint32_t length)
782 {
783  int n, i, r, g, b;
784 
785  if ((length % 3) != 0 || length > 256 * 3)
786  return AVERROR_INVALIDDATA;
787  /* read the palette */
788  n = length / 3;
789  for (i = 0; i < n; i++) {
790  r = bytestream2_get_byte(&s->gb);
791  g = bytestream2_get_byte(&s->gb);
792  b = bytestream2_get_byte(&s->gb);
793  s->palette[i] = (0xFFU << 24) | (r << 16) | (g << 8) | b;
794  }
795  for (; i < 256; i++)
796  s->palette[i] = (0xFFU << 24);
797  s->hdr_state |= PNG_PLTE;
798  bytestream2_skip(&s->gb, 4); /* crc */
799 
800  return 0;
801 }
802 
803 static int decode_trns_chunk(AVCodecContext *avctx, PNGDecContext *s,
804                              uint32_t length)
805 {
806  int v, i;
807 
808  if (!(s->hdr_state & PNG_IHDR)) {
809  av_log(avctx, AV_LOG_ERROR, "trns before IHDR\n");
810  return AVERROR_INVALIDDATA;
811  }
812 
813  if (s->pic_state & PNG_IDAT) {
814  av_log(avctx, AV_LOG_ERROR, "trns after IDAT\n");
815  return AVERROR_INVALIDDATA;
816  }
817 
818     if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
819         if (length > 256 || !(s->hdr_state & PNG_PLTE))
820  return AVERROR_INVALIDDATA;
821 
822  for (i = 0; i < length; i++) {
823  unsigned v = bytestream2_get_byte(&s->gb);
824  s->palette[i] = (s->palette[i] & 0x00ffffff) | (v << 24);
825  }
826  } else if (s->color_type == PNG_COLOR_TYPE_GRAY || s->color_type == PNG_COLOR_TYPE_RGB) {
827  if ((s->color_type == PNG_COLOR_TYPE_GRAY && length != 2) ||
828  (s->color_type == PNG_COLOR_TYPE_RGB && length != 6) ||
829  s->bit_depth == 1)
830  return AVERROR_INVALIDDATA;
831 
832  for (i = 0; i < length / 2; i++) {
833  /* only use the least significant bits */
834  v = av_mod_uintp2(bytestream2_get_be16(&s->gb), s->bit_depth);
835 
836  if (s->bit_depth > 8)
837  AV_WB16(&s->transparent_color_be[2 * i], v);
838  else
839  s->transparent_color_be[i] = v;
840  }
841  } else {
842  return AVERROR_INVALIDDATA;
843  }
844 
845  bytestream2_skip(&s->gb, 4); /* crc */
846  s->has_trns = 1;
847 
848  return 0;
849 }
850 
851 static int decode_iccp_chunk(PNGDecContext *s, int length, AVFrame *f)
852 {
853  int ret, cnt = 0;
854  uint8_t *data, profile_name[82];
855  AVBPrint bp;
856  AVFrameSideData *sd;
857 
858  while ((profile_name[cnt++] = bytestream2_get_byte(&s->gb)) && cnt < 81);
859  if (cnt > 80) {
860  av_log(s->avctx, AV_LOG_ERROR, "iCCP with invalid name!\n");
861  return AVERROR_INVALIDDATA;
862  }
863 
864  length = FFMAX(length - cnt, 0);
865 
866  if (bytestream2_get_byte(&s->gb) != 0) {
867  av_log(s->avctx, AV_LOG_ERROR, "iCCP with invalid compression!\n");
868  return AVERROR_INVALIDDATA;
869  }
870 
871  length = FFMAX(length - 1, 0);
872 
873  if ((ret = decode_zbuf(&bp, s->gb.buffer, s->gb.buffer + length)) < 0)
874  return ret;
875 
876  ret = av_bprint_finalize(&bp, (char **)&data);
877  if (ret < 0)
878  return ret;
879 
880     sd = av_frame_new_side_data(f, AV_FRAME_DATA_ICC_PROFILE, bp.len);
881     if (!sd) {
882  av_free(data);
883  return AVERROR(ENOMEM);
884  }
885 
886  av_dict_set(&sd->metadata, "name", profile_name, 0);
887  memcpy(sd->data, data, bp.len);
888  av_free(data);
889 
890  /* ICC compressed data and CRC */
891  bytestream2_skip(&s->gb, length + 4);
892 
893  return 0;
894 }
895 
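/* Rows with 1, 2 or 4 bits per pixel were decoded in packed form; expand
 * them in place to one byte per pixel. Each row is unpacked from right to
 * left so a packed byte is never overwritten before all pixels it contains
 * have been extracted; grayscale (non-palette) samples are also scaled to
 * the full 8-bit range (x * 0x55 for 2-bit, x * 0x11 for 4-bit values). */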
896 static void handle_small_bpp(PNGDecContext *s, AVFrame *p)
897 {
898  if (s->bits_per_pixel == 1 && s->color_type == PNG_COLOR_TYPE_PALETTE) {
899  int i, j, k;
900  uint8_t *pd = p->data[0];
901  for (j = 0; j < s->height; j++) {
902  i = s->width / 8;
903  for (k = 7; k >= 1; k--)
904  if ((s->width&7) >= k)
905  pd[8*i + k - 1] = (pd[i]>>8-k) & 1;
906  for (i--; i >= 0; i--) {
907  pd[8*i + 7]= pd[i] & 1;
908  pd[8*i + 6]= (pd[i]>>1) & 1;
909  pd[8*i + 5]= (pd[i]>>2) & 1;
910  pd[8*i + 4]= (pd[i]>>3) & 1;
911  pd[8*i + 3]= (pd[i]>>4) & 1;
912  pd[8*i + 2]= (pd[i]>>5) & 1;
913  pd[8*i + 1]= (pd[i]>>6) & 1;
914  pd[8*i + 0]= pd[i]>>7;
915  }
916  pd += s->image_linesize;
917  }
918  } else if (s->bits_per_pixel == 2) {
919  int i, j;
920  uint8_t *pd = p->data[0];
921  for (j = 0; j < s->height; j++) {
922  i = s->width / 4;
923             if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
924                 if ((s->width&3) >= 3) pd[4*i + 2]= (pd[i] >> 2) & 3;
925  if ((s->width&3) >= 2) pd[4*i + 1]= (pd[i] >> 4) & 3;
926  if ((s->width&3) >= 1) pd[4*i + 0]= pd[i] >> 6;
927  for (i--; i >= 0; i--) {
928  pd[4*i + 3]= pd[i] & 3;
929  pd[4*i + 2]= (pd[i]>>2) & 3;
930  pd[4*i + 1]= (pd[i]>>4) & 3;
931  pd[4*i + 0]= pd[i]>>6;
932  }
933  } else {
934  if ((s->width&3) >= 3) pd[4*i + 2]= ((pd[i]>>2) & 3)*0x55;
935  if ((s->width&3) >= 2) pd[4*i + 1]= ((pd[i]>>4) & 3)*0x55;
936  if ((s->width&3) >= 1) pd[4*i + 0]= ( pd[i]>>6 )*0x55;
937  for (i--; i >= 0; i--) {
938  pd[4*i + 3]= ( pd[i] & 3)*0x55;
939  pd[4*i + 2]= ((pd[i]>>2) & 3)*0x55;
940  pd[4*i + 1]= ((pd[i]>>4) & 3)*0x55;
941  pd[4*i + 0]= ( pd[i]>>6 )*0x55;
942  }
943  }
944  pd += s->image_linesize;
945  }
946  } else if (s->bits_per_pixel == 4) {
947  int i, j;
948  uint8_t *pd = p->data[0];
949  for (j = 0; j < s->height; j++) {
950  i = s->width/2;
951             if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
952                 if (s->width&1) pd[2*i+0]= pd[i]>>4;
953  for (i--; i >= 0; i--) {
954  pd[2*i + 1] = pd[i] & 15;
955  pd[2*i + 0] = pd[i] >> 4;
956  }
957  } else {
958  if (s->width & 1) pd[2*i + 0]= (pd[i] >> 4) * 0x11;
959  for (i--; i >= 0; i--) {
960  pd[2*i + 1] = (pd[i] & 15) * 0x11;
961  pd[2*i + 0] = (pd[i] >> 4) * 0x11;
962  }
963  }
964  pd += s->image_linesize;
965  }
966  }
967 }
968 
969 static int decode_fctl_chunk(AVCodecContext *avctx, PNGDecContext *s,
970                              uint32_t length)
971 {
972  uint32_t sequence_number;
973     int cur_w, cur_h, x_offset, y_offset, dispose_op, blend_op;
974 
975  if (length != 26)
976  return AVERROR_INVALIDDATA;
977 
978  if (!(s->hdr_state & PNG_IHDR)) {
979  av_log(avctx, AV_LOG_ERROR, "fctl before IHDR\n");
980  return AVERROR_INVALIDDATA;
981  }
982 
983  if (s->pic_state & PNG_IDAT) {
984  av_log(avctx, AV_LOG_ERROR, "fctl after IDAT\n");
985  return AVERROR_INVALIDDATA;
986  }
987 
988  s->last_w = s->cur_w;
989  s->last_h = s->cur_h;
990  s->last_x_offset = s->x_offset;
991  s->last_y_offset = s->y_offset;
992  s->last_dispose_op = s->dispose_op;
993 
994  sequence_number = bytestream2_get_be32(&s->gb);
995  cur_w = bytestream2_get_be32(&s->gb);
996  cur_h = bytestream2_get_be32(&s->gb);
997  x_offset = bytestream2_get_be32(&s->gb);
998  y_offset = bytestream2_get_be32(&s->gb);
999  bytestream2_skip(&s->gb, 4); /* delay_num (2), delay_den (2) */
1000  dispose_op = bytestream2_get_byte(&s->gb);
1001  blend_op = bytestream2_get_byte(&s->gb);
1002  bytestream2_skip(&s->gb, 4); /* crc */
1003 
1004  if (sequence_number == 0 &&
1005  (cur_w != s->width ||
1006  cur_h != s->height ||
1007  x_offset != 0 ||
1008  y_offset != 0) ||
1009  cur_w <= 0 || cur_h <= 0 ||
1010  x_offset < 0 || y_offset < 0 ||
1011  cur_w > s->width - x_offset|| cur_h > s->height - y_offset)
1012  return AVERROR_INVALIDDATA;
1013 
1014  if (blend_op != APNG_BLEND_OP_OVER && blend_op != APNG_BLEND_OP_SOURCE) {
1015  av_log(avctx, AV_LOG_ERROR, "Invalid blend_op %d\n", blend_op);
1016  return AVERROR_INVALIDDATA;
1017  }
1018 
1019  if ((sequence_number == 0 || !s->last_picture.f->data[0]) &&
1020  dispose_op == APNG_DISPOSE_OP_PREVIOUS) {
1021  // No previous frame to revert to for the first frame
1022  // Spec says to just treat it as a APNG_DISPOSE_OP_BACKGROUND
1023  dispose_op = APNG_DISPOSE_OP_BACKGROUND;
1024  }
1025 
1026  if (blend_op == APNG_BLEND_OP_OVER && !s->has_trns && (
1027  avctx->pix_fmt == AV_PIX_FMT_RGB24 ||
1028  avctx->pix_fmt == AV_PIX_FMT_RGB48BE ||
1029  avctx->pix_fmt == AV_PIX_FMT_PAL8 ||
1030  avctx->pix_fmt == AV_PIX_FMT_GRAY8 ||
1031  avctx->pix_fmt == AV_PIX_FMT_GRAY16BE ||
1032  avctx->pix_fmt == AV_PIX_FMT_MONOBLACK
1033  )) {
1034  // APNG_BLEND_OP_OVER is the same as APNG_BLEND_OP_SOURCE when there is no alpha channel
1035  blend_op = APNG_BLEND_OP_SOURCE;
1036  }
1037 
1038  s->cur_w = cur_w;
1039  s->cur_h = cur_h;
1040  s->x_offset = x_offset;
1041  s->y_offset = y_offset;
1042  s->dispose_op = dispose_op;
1043  s->blend_op = blend_op;
1044 
1045  return 0;
1046 }
1047 
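/* Non-keyframe PNG packets carry only per-pixel deltas against the previous
 * output picture; reconstruct the frame by adding the previous picture back
 * in, byte by byte, over the visible line width. */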
1048 static void handle_p_frame_png(PNGDecContext *s, AVFrame *p)
1049 {
1050  int i, j;
1051  uint8_t *pd = p->data[0];
1052  uint8_t *pd_last = s->last_picture.f->data[0];
1053  int ls = FFMIN(av_image_get_linesize(p->format, s->width, 0), s->width * s->bpp);
1054 
1055  ff_thread_await_progress(&s->last_picture, INT_MAX, 0);
1056  for (j = 0; j < s->height; j++) {
1057  for (i = 0; i < ls; i++)
1058  pd[i] += pd_last[i];
1059  pd += s->image_linesize;
1060  pd_last += s->image_linesize;
1061  }
1062 }
1063 
1064 // divide by 255 and round to nearest
1065 // apply a fast variant: (X+127)/255 = ((X+127)*257+257)>>16 = ((X+128)*257)>>16
1066 #define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
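/* Quick check of the approximation: FAST_DIV255(255 * 255) =
 * ((65025 + 128) * 257) >> 16 = 255 and FAST_DIV255(128 * 255) =
 * ((32640 + 128) * 257) >> 16 = 128, matching exact division by 255 with
 * rounding to nearest. */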
1067 
1068 static int handle_p_frame_apng(AVCodecContext *avctx, PNGDecContext *s,
1069                                AVFrame *p)
1070 {
1071  size_t x, y;
1072  uint8_t *buffer;
1073 
1074  if (s->blend_op == APNG_BLEND_OP_OVER &&
1075  avctx->pix_fmt != AV_PIX_FMT_RGBA &&
1076  avctx->pix_fmt != AV_PIX_FMT_GRAY8A &&
1077  avctx->pix_fmt != AV_PIX_FMT_PAL8) {
1078  avpriv_request_sample(avctx, "Blending with pixel format %s",
1079  av_get_pix_fmt_name(avctx->pix_fmt));
1080  return AVERROR_PATCHWELCOME;
1081  }
1082 
1083  buffer = av_malloc_array(s->image_linesize, s->height);
1084  if (!buffer)
1085  return AVERROR(ENOMEM);
1086 
1087  ff_thread_await_progress(&s->last_picture, INT_MAX, 0);
1088 
1089  // need to reset a rectangle to background:
1090  // create a new writable copy
1091     if (s->last_dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
1092         int ret = av_frame_make_writable(s->last_picture.f);
1093         if (ret < 0)
1094  return ret;
1095 
1096  for (y = s->last_y_offset; y < s->last_y_offset + s->last_h; y++) {
1097  memset(s->last_picture.f->data[0] + s->image_linesize * y +
1098  s->bpp * s->last_x_offset, 0, s->bpp * s->last_w);
1099  }
1100  }
1101 
1102  memcpy(buffer, s->last_picture.f->data[0], s->image_linesize * s->height);
1103 
1104  // Perform blending
1105  if (s->blend_op == APNG_BLEND_OP_SOURCE) {
1106  for (y = s->y_offset; y < s->y_offset + s->cur_h; ++y) {
1107  size_t row_start = s->image_linesize * y + s->bpp * s->x_offset;
1108  memcpy(buffer + row_start, p->data[0] + row_start, s->bpp * s->cur_w);
1109  }
1110  } else { // APNG_BLEND_OP_OVER
1111  for (y = s->y_offset; y < s->y_offset + s->cur_h; ++y) {
1112  uint8_t *foreground = p->data[0] + s->image_linesize * y + s->bpp * s->x_offset;
1113  uint8_t *background = buffer + s->image_linesize * y + s->bpp * s->x_offset;
1114  for (x = s->x_offset; x < s->x_offset + s->cur_w; ++x, foreground += s->bpp, background += s->bpp) {
1115  size_t b;
1116  uint8_t foreground_alpha, background_alpha, output_alpha;
1117  uint8_t output[10];
1118 
1119  // Since we might be blending alpha onto alpha, we use the following equations:
1120  // output_alpha = foreground_alpha + (1 - foreground_alpha) * background_alpha
1121  // output = (foreground_alpha * foreground + (1 - foreground_alpha) * background_alpha * background) / output_alpha
1122 
1123  switch (avctx->pix_fmt) {
1124  case AV_PIX_FMT_RGBA:
1125  foreground_alpha = foreground[3];
1126  background_alpha = background[3];
1127  break;
1128 
1129  case AV_PIX_FMT_GRAY8A:
1130  foreground_alpha = foreground[1];
1131  background_alpha = background[1];
1132  break;
1133 
1134  case AV_PIX_FMT_PAL8:
1135  foreground_alpha = s->palette[foreground[0]] >> 24;
1136  background_alpha = s->palette[background[0]] >> 24;
1137  break;
1138  }
1139 
1140  if (foreground_alpha == 0)
1141  continue;
1142 
1143  if (foreground_alpha == 255) {
1144  memcpy(background, foreground, s->bpp);
1145  continue;
1146  }
1147 
1148  if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
1149  // TODO: Alpha blending with PAL8 will likely need the entire image converted over to RGBA first
1150  avpriv_request_sample(avctx, "Alpha blending palette samples");
1151  background[0] = foreground[0];
1152  continue;
1153  }
1154 
1155  output_alpha = foreground_alpha + FAST_DIV255((255 - foreground_alpha) * background_alpha);
1156 
1157  av_assert0(s->bpp <= 10);
1158 
1159  for (b = 0; b < s->bpp - 1; ++b) {
1160  if (output_alpha == 0) {
1161  output[b] = 0;
1162  } else if (background_alpha == 255) {
1163  output[b] = FAST_DIV255(foreground_alpha * foreground[b] + (255 - foreground_alpha) * background[b]);
1164  } else {
1165  output[b] = (255 * foreground_alpha * foreground[b] + (255 - foreground_alpha) * background_alpha * background[b]) / (255 * output_alpha);
1166  }
1167  }
1168  output[b] = output_alpha;
1169  memcpy(background, output, s->bpp);
1170  }
1171  }
1172  }
1173 
1174  // Copy blended buffer into the frame and free
1175  memcpy(p->data[0], buffer, s->image_linesize * s->height);
1176  av_free(buffer);
1177 
1178  return 0;
1179 }
1180 
1181 static int decode_frame_common(AVCodecContext *avctx, PNGDecContext *s,
1182                                AVFrame *p, AVPacket *avpkt)
1183 {
1184  const AVCRC *crc_tab = av_crc_get_table(AV_CRC_32_IEEE_LE);
1185  AVDictionary **metadatap = NULL;
1186  uint32_t tag, length;
1187  int decode_next_dat = 0;
1188  int i, ret;
1189 
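    /* Every PNG chunk is laid out as a 4-byte big-endian payload length, a
     * 4-byte type tag, `length` payload bytes and a 4-byte CRC computed over
     * the tag plus payload. The loop below reads length and tag, optionally
     * verifies the CRC, then dispatches on the tag; each handler (or the
     * skip_tag path) consumes the payload and the trailing CRC. */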
1190  for (;;) {
1191  length = bytestream2_get_bytes_left(&s->gb);
1192  if (length <= 0) {
1193 
1194  if (avctx->codec_id == AV_CODEC_ID_PNG &&
1195  avctx->skip_frame == AVDISCARD_ALL) {
1196  return 0;
1197  }
1198 
1199  if (CONFIG_APNG_DECODER && avctx->codec_id == AV_CODEC_ID_APNG && length == 0) {
1200  if (!(s->pic_state & PNG_IDAT))
1201  return 0;
1202  else
1203  goto exit_loop;
1204  }
1205  av_log(avctx, AV_LOG_ERROR, "%d bytes left\n", length);
1206  if ( s->pic_state & PNG_ALLIMAGE
1207                 || (avctx->err_recognition & (AV_EF_EXPLODE | AV_EF_COMPLIANCE)))
1208                 goto exit_loop;
1209  ret = AVERROR_INVALIDDATA;
1210  goto fail;
1211  }
1212 
1213  length = bytestream2_get_be32(&s->gb);
1214  if (length > 0x7fffffff || length > bytestream2_get_bytes_left(&s->gb)) {
1215  av_log(avctx, AV_LOG_ERROR, "chunk too big\n");
1216  ret = AVERROR_INVALIDDATA;
1217  goto fail;
1218  }
1219  if (avctx->err_recognition & (AV_EF_CRCCHECK | AV_EF_IGNORE_ERR)) {
1220  uint32_t crc_sig = AV_RB32(s->gb.buffer + length + 4);
1221  uint32_t crc_cal = ~av_crc(crc_tab, UINT32_MAX, s->gb.buffer, length + 4);
1222  if (crc_sig ^ crc_cal) {
1223  av_log(avctx, AV_LOG_ERROR, "CRC mismatch in chunk");
1224  if (avctx->err_recognition & AV_EF_EXPLODE) {
1225  av_log(avctx, AV_LOG_ERROR, ", quitting\n");
1226  ret = AVERROR_INVALIDDATA;
1227  goto fail;
1228  }
1229  av_log(avctx, AV_LOG_ERROR, ", skipping\n");
1230  bytestream2_skip(&s->gb, 4); /* tag */
1231  goto skip_tag;
1232  }
1233  }
1234  tag = bytestream2_get_le32(&s->gb);
1235  if (avctx->debug & FF_DEBUG_STARTCODE)
1236  av_log(avctx, AV_LOG_DEBUG, "png: tag=%s length=%u\n",
1237  av_fourcc2str(tag), length);
1238 
1239  if (avctx->codec_id == AV_CODEC_ID_PNG &&
1240  avctx->skip_frame == AVDISCARD_ALL) {
1241  switch(tag) {
1242  case MKTAG('I', 'H', 'D', 'R'):
1243  case MKTAG('p', 'H', 'Y', 's'):
1244  case MKTAG('t', 'E', 'X', 't'):
1245  case MKTAG('I', 'D', 'A', 'T'):
1246  case MKTAG('t', 'R', 'N', 'S'):
1247  break;
1248  default:
1249  goto skip_tag;
1250  }
1251  }
1252 
1253  metadatap = &p->metadata;
1254  switch (tag) {
1255  case MKTAG('I', 'H', 'D', 'R'):
1256  if ((ret = decode_ihdr_chunk(avctx, s, length)) < 0)
1257  goto fail;
1258  break;
1259  case MKTAG('p', 'H', 'Y', 's'):
1260  if ((ret = decode_phys_chunk(avctx, s)) < 0)
1261  goto fail;
1262  break;
1263  case MKTAG('f', 'c', 'T', 'L'):
1264  if (!CONFIG_APNG_DECODER || avctx->codec_id != AV_CODEC_ID_APNG)
1265  goto skip_tag;
1266  if ((ret = decode_fctl_chunk(avctx, s, length)) < 0)
1267  goto fail;
1268  decode_next_dat = 1;
1269  break;
1270  case MKTAG('f', 'd', 'A', 'T'):
1271  if (!CONFIG_APNG_DECODER || avctx->codec_id != AV_CODEC_ID_APNG)
1272  goto skip_tag;
1273  if (!decode_next_dat || length < 4) {
1274  ret = AVERROR_INVALIDDATA;
1275  goto fail;
1276  }
1277  bytestream2_get_be32(&s->gb);
1278  length -= 4;
1279  /* fallthrough */
1280  case MKTAG('I', 'D', 'A', 'T'):
1281  if (CONFIG_APNG_DECODER && avctx->codec_id == AV_CODEC_ID_APNG && !decode_next_dat)
1282  goto skip_tag;
1283  if ((ret = decode_idat_chunk(avctx, s, length, p)) < 0)
1284  goto fail;
1285  break;
1286  case MKTAG('P', 'L', 'T', 'E'):
1287  if (decode_plte_chunk(avctx, s, length) < 0)
1288  goto skip_tag;
1289  break;
1290  case MKTAG('t', 'R', 'N', 'S'):
1291  if (decode_trns_chunk(avctx, s, length) < 0)
1292  goto skip_tag;
1293  break;
1294  case MKTAG('t', 'E', 'X', 't'):
1295  if (decode_text_chunk(s, length, 0, metadatap) < 0)
1296  av_log(avctx, AV_LOG_WARNING, "Broken tEXt chunk\n");
1297  bytestream2_skip(&s->gb, length + 4);
1298  break;
1299  case MKTAG('z', 'T', 'X', 't'):
1300  if (decode_text_chunk(s, length, 1, metadatap) < 0)
1301  av_log(avctx, AV_LOG_WARNING, "Broken zTXt chunk\n");
1302  bytestream2_skip(&s->gb, length + 4);
1303  break;
1304  case MKTAG('s', 'T', 'E', 'R'): {
1305  int mode = bytestream2_get_byte(&s->gb);
1306             AVStereo3D *stereo3d = av_stereo3d_create_side_data(p);
1307             if (!stereo3d) {
1308  ret = AVERROR(ENOMEM);
1309  goto fail;
1310  }
1311 
1312  if (mode == 0 || mode == 1) {
1313  stereo3d->type = AV_STEREO3D_SIDEBYSIDE;
1314  stereo3d->flags = mode ? 0 : AV_STEREO3D_FLAG_INVERT;
1315  } else {
1316  av_log(avctx, AV_LOG_WARNING,
1317  "Unknown value in sTER chunk (%d)\n", mode);
1318  }
1319  bytestream2_skip(&s->gb, 4); /* crc */
1320  break;
1321  }
1322  case MKTAG('i', 'C', 'C', 'P'): {
1323  if ((ret = decode_iccp_chunk(s, length, p)) < 0)
1324  goto fail;
1325  break;
1326  }
1327  case MKTAG('c', 'H', 'R', 'M'): {
1328             AVMasteringDisplayMetadata *mdm = av_mastering_display_metadata_create_side_data(p);
1329             if (!mdm) {
1330  ret = AVERROR(ENOMEM);
1331  goto fail;
1332  }
1333 
1334  mdm->white_point[0] = av_make_q(bytestream2_get_be32(&s->gb), 100000);
1335  mdm->white_point[1] = av_make_q(bytestream2_get_be32(&s->gb), 100000);
1336 
1337  /* RGB Primaries */
1338  for (i = 0; i < 3; i++) {
1339  mdm->display_primaries[i][0] = av_make_q(bytestream2_get_be32(&s->gb), 100000);
1340  mdm->display_primaries[i][1] = av_make_q(bytestream2_get_be32(&s->gb), 100000);
1341  }
1342 
1343  mdm->has_primaries = 1;
1344  bytestream2_skip(&s->gb, 4); /* crc */
1345  break;
1346  }
1347  case MKTAG('g', 'A', 'M', 'A'): {
1348  AVBPrint bp;
1349  char *gamma_str;
1350  int num = bytestream2_get_be32(&s->gb);
1351 
1352             av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED);
1353             av_bprintf(&bp, "%i/%i", num, 100000);
1354  ret = av_bprint_finalize(&bp, &gamma_str);
1355  if (ret < 0)
1356  return ret;
1357 
1358  av_dict_set(&p->metadata, "gamma", gamma_str, AV_DICT_DONT_STRDUP_VAL);
1359 
1360  bytestream2_skip(&s->gb, 4); /* crc */
1361  break;
1362  }
1363  case MKTAG('I', 'E', 'N', 'D'):
1364  if (!(s->pic_state & PNG_ALLIMAGE))
1365  av_log(avctx, AV_LOG_ERROR, "IEND without all image\n");
1366  if (!(s->pic_state & (PNG_ALLIMAGE|PNG_IDAT))) {
1367  ret = AVERROR_INVALIDDATA;
1368  goto fail;
1369  }
1370  bytestream2_skip(&s->gb, 4); /* crc */
1371  goto exit_loop;
1372  default:
1373  /* skip tag */
1374 skip_tag:
1375  bytestream2_skip(&s->gb, length + 4);
1376  break;
1377  }
1378  }
1379 exit_loop:
1380 
1381  if (avctx->codec_id == AV_CODEC_ID_PNG &&
1382  avctx->skip_frame == AVDISCARD_ALL) {
1383  return 0;
1384  }
1385 
1386     if (percent_missing(s) > avctx->discard_damaged_percentage)
1387         return AVERROR_INVALIDDATA;
1388 
1389  if (s->bits_per_pixel <= 4)
1390  handle_small_bpp(s, p);
1391 
1392  /* apply transparency if needed */
1393  if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE) {
1394  size_t byte_depth = s->bit_depth > 8 ? 2 : 1;
1395  size_t raw_bpp = s->bpp - byte_depth;
1396  unsigned x, y;
1397 
1398  av_assert0(s->bit_depth > 1);
1399 
1400  for (y = 0; y < s->height; ++y) {
1401  uint8_t *row = &s->image_buf[s->image_linesize * y];
1402 
1403  if (s->bpp == 2 && byte_depth == 1) {
1404  uint8_t *pixel = &row[2 * s->width - 1];
1405  uint8_t *rowp = &row[1 * s->width - 1];
1406  int tcolor = s->transparent_color_be[0];
1407  for (x = s->width; x > 0; --x) {
1408  *pixel-- = *rowp == tcolor ? 0 : 0xff;
1409  *pixel-- = *rowp--;
1410  }
1411  } else if (s->bpp == 4 && byte_depth == 1) {
1412  uint8_t *pixel = &row[4 * s->width - 1];
1413  uint8_t *rowp = &row[3 * s->width - 1];
1414  int tcolor = AV_RL24(s->transparent_color_be);
1415  for (x = s->width; x > 0; --x) {
1416  *pixel-- = AV_RL24(rowp-2) == tcolor ? 0 : 0xff;
1417  *pixel-- = *rowp--;
1418  *pixel-- = *rowp--;
1419  *pixel-- = *rowp--;
1420  }
1421  } else {
1422  /* since we're updating in-place, we have to go from right to left */
1423  for (x = s->width; x > 0; --x) {
1424  uint8_t *pixel = &row[s->bpp * (x - 1)];
1425  memmove(pixel, &row[raw_bpp * (x - 1)], raw_bpp);
1426 
1427  if (!memcmp(pixel, s->transparent_color_be, raw_bpp)) {
1428  memset(&pixel[raw_bpp], 0, byte_depth);
1429  } else {
1430  memset(&pixel[raw_bpp], 0xff, byte_depth);
1431  }
1432  }
1433  }
1434  }
1435  }
1436 
1437  /* handle P-frames only if a predecessor frame is available */
1438  if (s->last_picture.f->data[0]) {
1439  if ( !(avpkt->flags & AV_PKT_FLAG_KEY) && avctx->codec_tag != AV_RL32("MPNG")
1440  && s->last_picture.f->width == p->width
1441  && s->last_picture.f->height== p->height
1442  && s->last_picture.f->format== p->format
1443  ) {
1444  if (CONFIG_PNG_DECODER && avctx->codec_id != AV_CODEC_ID_APNG)
1445  handle_p_frame_png(s, p);
1446  else if (CONFIG_APNG_DECODER &&
1447  avctx->codec_id == AV_CODEC_ID_APNG &&
1448  (ret = handle_p_frame_apng(avctx, s, p)) < 0)
1449  goto fail;
1450  }
1451  }
1452  ff_thread_report_progress(&s->picture, INT_MAX, 0);
1453 
1454  return 0;
1455 
1456 fail:
1457  ff_thread_report_progress(&s->picture, INT_MAX, 0);
1458  return ret;
1459 }
1460 
1461 #if CONFIG_PNG_DECODER
1462 static int decode_frame_png(AVCodecContext *avctx,
1463  void *data, int *got_frame,
1464  AVPacket *avpkt)
1465 {
1466  PNGDecContext *const s = avctx->priv_data;
1467  const uint8_t *buf = avpkt->data;
1468  int buf_size = avpkt->size;
1469  AVFrame *p = s->picture.f;
1470  int64_t sig;
1471  int ret;
1472 
1473  bytestream2_init(&s->gb, buf, buf_size);
1474 
1475  /* check signature */
1476  sig = bytestream2_get_be64(&s->gb);
1477  if (sig != PNGSIG &&
1478  sig != MNGSIG) {
1479  av_log(avctx, AV_LOG_ERROR, "Invalid PNG signature 0x%08"PRIX64".\n", sig);
1480  return AVERROR_INVALIDDATA;
1481  }
1482 
1483  s->y = s->has_trns = 0;
1484  s->hdr_state = 0;
1485  s->pic_state = 0;
1486 
1487  /* init the zlib */
1488  s->zstream.zalloc = ff_png_zalloc;
1489  s->zstream.zfree = ff_png_zfree;
1490  s->zstream.opaque = NULL;
1491  ret = inflateInit(&s->zstream);
1492  if (ret != Z_OK) {
1493  av_log(avctx, AV_LOG_ERROR, "inflateInit returned error %d\n", ret);
1494  return AVERROR_EXTERNAL;
1495  }
1496 
1497  if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
1498  goto the_end;
1499 
1500  if (avctx->skip_frame == AVDISCARD_ALL) {
1501  *got_frame = 0;
1502  ret = bytestream2_tell(&s->gb);
1503  goto the_end;
1504  }
1505 
1506  if ((ret = av_frame_ref(data, s->picture.f)) < 0)
1507  goto the_end;
1508 
1509  if (!(avctx->active_thread_type & FF_THREAD_FRAME)) {
1510         ff_thread_release_buffer(avctx, &s->last_picture);
1511         FFSWAP(ThreadFrame, s->picture, s->last_picture);
1512     }
1513 
1514  *got_frame = 1;
1515 
1516  ret = bytestream2_tell(&s->gb);
1517 the_end:
1518  inflateEnd(&s->zstream);
1519  s->crow_buf = NULL;
1520  return ret;
1521 }
1522 #endif
1523 
1524 #if CONFIG_APNG_DECODER
1525 static int decode_frame_apng(AVCodecContext *avctx,
1526  void *data, int *got_frame,
1527  AVPacket *avpkt)
1528 {
1529  PNGDecContext *const s = avctx->priv_data;
1530  int ret;
1531  AVFrame *p = s->picture.f;
1532 
1533  if (!(s->hdr_state & PNG_IHDR)) {
1534  if (!avctx->extradata_size)
1535  return AVERROR_INVALIDDATA;
1536 
1537  /* only init fields, there is no zlib use in extradata */
1538  s->zstream.zalloc = ff_png_zalloc;
1539  s->zstream.zfree = ff_png_zfree;
1540 
1541  bytestream2_init(&s->gb, avctx->extradata, avctx->extradata_size);
1542  if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
1543  goto end;
1544  }
1545 
1546  /* reset state for a new frame */
1547  if ((ret = inflateInit(&s->zstream)) != Z_OK) {
1548  av_log(avctx, AV_LOG_ERROR, "inflateInit returned error %d\n", ret);
1549  ret = AVERROR_EXTERNAL;
1550  goto end;
1551  }
1552  s->y = 0;
1553  s->pic_state = 0;
1554  bytestream2_init(&s->gb, avpkt->data, avpkt->size);
1555  if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
1556  goto end;
1557 
1558  if (!(s->pic_state & PNG_ALLIMAGE))
1559  av_log(avctx, AV_LOG_WARNING, "Frame did not contain a complete image\n");
1560  if (!(s->pic_state & (PNG_ALLIMAGE|PNG_IDAT))) {
1561  ret = AVERROR_INVALIDDATA;
1562  goto end;
1563  }
1564  if ((ret = av_frame_ref(data, s->picture.f)) < 0)
1565  goto end;
1566 
1567  if (!(avctx->active_thread_type & FF_THREAD_FRAME)) {
1568         if (s->dispose_op == APNG_DISPOSE_OP_PREVIOUS) {
1569             ff_thread_release_buffer(avctx, &s->picture);
1570         } else if (s->dispose_op == APNG_DISPOSE_OP_NONE) {
1571             ff_thread_release_buffer(avctx, &s->last_picture);
1572             FFSWAP(ThreadFrame, s->picture, s->last_picture);
1573         }
1574  }
1575 
1576  *got_frame = 1;
1577  ret = bytestream2_tell(&s->gb);
1578 
1579 end:
1580  inflateEnd(&s->zstream);
1581  return ret;
1582 }
1583 #endif
1584 
1585 #if HAVE_THREADS
1586 static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
1587 {
1588  PNGDecContext *psrc = src->priv_data;
1589  PNGDecContext *pdst = dst->priv_data;
1590  ThreadFrame *src_frame = NULL;
1591  int ret;
1592 
1593  if (dst == src)
1594  return 0;
1595 
1596  if (CONFIG_APNG_DECODER && dst->codec_id == AV_CODEC_ID_APNG) {
1597 
1598  pdst->width = psrc->width;
1599  pdst->height = psrc->height;
1600  pdst->bit_depth = psrc->bit_depth;
1601  pdst->color_type = psrc->color_type;
1602  pdst->compression_type = psrc->compression_type;
1603  pdst->interlace_type = psrc->interlace_type;
1604  pdst->filter_type = psrc->filter_type;
1605  pdst->cur_w = psrc->cur_w;
1606  pdst->cur_h = psrc->cur_h;
1607  pdst->x_offset = psrc->x_offset;
1608  pdst->y_offset = psrc->y_offset;
1609  pdst->has_trns = psrc->has_trns;
1610  memcpy(pdst->transparent_color_be, psrc->transparent_color_be, sizeof(pdst->transparent_color_be));
1611 
1612  pdst->dispose_op = psrc->dispose_op;
1613 
1614  memcpy(pdst->palette, psrc->palette, sizeof(pdst->palette));
1615 
1616  pdst->hdr_state |= psrc->hdr_state;
1617  }
1618 
1619  src_frame = psrc->dispose_op == APNG_DISPOSE_OP_NONE ?
1620  &psrc->picture : &psrc->last_picture;
1621 
1622     ff_thread_release_buffer(dst, &pdst->last_picture);
1623     if (src_frame && src_frame->f->data[0]) {
1624  ret = ff_thread_ref_frame(&pdst->last_picture, src_frame);
1625  if (ret < 0)
1626  return ret;
1627  }
1628 
1629  return 0;
1630 }
1631 #endif
1632 
1633 static av_cold int png_dec_init(AVCodecContext *avctx)
1634 {
1635  PNGDecContext *s = avctx->priv_data;
1636 
1637  avctx->color_range = AVCOL_RANGE_JPEG;
1638 
1639  s->avctx = avctx;
1640  s->last_picture.f = av_frame_alloc();
1641  s->picture.f = av_frame_alloc();
1642  if (!s->last_picture.f || !s->picture.f) {
1643         av_frame_free(&s->last_picture.f);
1644         av_frame_free(&s->picture.f);
1645  return AVERROR(ENOMEM);
1646  }
1647 
1648  ff_pngdsp_init(&s->dsp);
1649 
1650  return 0;
1651 }
1652 
1653 static av_cold int png_dec_end(AVCodecContext *avctx)
1654 {
1655  PNGDecContext *s = avctx->priv_data;
1656 
1657     ff_thread_release_buffer(avctx, &s->last_picture);
1658     av_frame_free(&s->last_picture.f);
1659     ff_thread_release_buffer(avctx, &s->picture);
1660  av_frame_free(&s->picture.f);
1661  av_freep(&s->buffer);
1662  s->buffer_size = 0;
1663  av_freep(&s->last_row);
1664  s->last_row_size = 0;
1665  av_freep(&s->tmp_row);
1666  s->tmp_row_size = 0;
1667 
1668  return 0;
1669 }
1670 
1671 #if CONFIG_APNG_DECODER
1672 AVCodec ff_apng_decoder = {
1673     .name           = "apng",
1674  .long_name = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
1675  .type = AVMEDIA_TYPE_VIDEO,
1676  .id = AV_CODEC_ID_APNG,
1677  .priv_data_size = sizeof(PNGDecContext),
1678  .init = png_dec_init,
1679  .close = png_dec_end,
1680  .decode = decode_frame_apng,
1681     .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
1682     .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/,
1683     .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE |
1684                       FF_CODEC_CAP_ALLOCATE_PROGRESS,
1685 };
1686 #endif
1687 
1688 #if CONFIG_PNG_DECODER
1689 AVCodec ff_png_decoder = {
1690     .name           = "png",
1691  .long_name = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
1692  .type = AVMEDIA_TYPE_VIDEO,
1693  .id = AV_CODEC_ID_PNG,
1694  .priv_data_size = sizeof(PNGDecContext),
1695  .init = png_dec_init,
1696  .close = png_dec_end,
1697  .decode = decode_frame_png,
1698     .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
1699     .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/,
1700     .caps_internal  = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM | FF_CODEC_CAP_INIT_THREADSAFE |
1701                       FF_CODEC_CAP_ALLOCATE_PROGRESS,
1702 };
1703 #endif
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:167
static int decode_idat_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length, AVFrame *p)
Definition: pngdec.c:628
static int decode_fctl_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length)
Definition: pngdec.c:969
#define PNG_FILTER_VALUE_AVG
Definition: png.h:43
static void png_handle_row(PNGDecContext *s)
Definition: pngdec.c:333
#define NULL
Definition: coverity.c:32
int last_y_offset
Definition: pngdec.c:66
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane...
Definition: imgutils.c:76
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:59
void av_bprintf(AVBPrint *buf, const char *fmt,...)
Definition: bprint.c:94
This structure describes decoded (raw) audio or video data.
Definition: frame.h:314
int width
Definition: pngdec.c:62
ptrdiff_t const GLvoid * data
Definition: opengl_enc.c:100
unsigned int tmp_row_size
Definition: pngdec.c:87
8 bits gray, 8 bits alpha
Definition: pixfmt.h:143
misc image utilities
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:200
AVFrame * f
Definition: thread.h:35
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:68
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:106
const char * g
Definition: vf_curves.c:117
int pass_row_size
Definition: pngdec.c:93
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:31
AVDictionary * metadata
Definition: frame.h:224
uint8_t * tmp_row
Definition: pngdec.c:86
#define avpriv_request_sample(...)
PNGHeaderState
Definition: pngdec.c:42
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:1166
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
int num
Numerator.
Definition: rational.h:59
static int decode_text_chunk(PNGDecContext *s, uint32_t length, int compressed, AVDictionary **dict)
Definition: pngdec.c:512
int size
Definition: packet.h:364
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel...
Definition: avcodec.h:910
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:36
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:741
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
Definition: bytestream.h:137
void av_fast_padded_malloc(void *ptr, unsigned int *size, size_t min_size)
Same behaviour av_fast_malloc but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end w...
Definition: utils.c:72
enum PNGImageState pic_state
Definition: pngdec.c:61
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
discard all
Definition: avcodec.h:236
Views are next to each other.
Definition: stereo3d.h:67
#define PNG_COLOR_TYPE_RGB
Definition: png.h:35
static void error(const char *err)
void(* add_bytes_l2)(uint8_t *dst, uint8_t *src1, uint8_t *src2, int w)
Definition: pngdsp.h:28
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them.reget_buffer() and buffer age optimizations no longer work.*The contents of buffers must not be written to after ff_thread_report_progress() has been called on them.This includes draw_edges().Porting codecs to frame threading
#define PNG_COLOR_TYPE_GRAY_ALPHA
Definition: png.h:37
AVCodec.
Definition: codec.h:190
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
Definition: decode_audio.c:71
#define PNG_COLOR_TYPE_PALETTE
Definition: png.h:34
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
Definition: bprint.c:235
int filter_type
Definition: pngdec.c:73
void ff_add_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top, int w, int bpp)
Definition: pngdec.c:186
#define AV_DICT_DONT_STRDUP_KEY
Take ownership of a key that&#39;s been allocated with av_malloc() or another memory allocation function...
Definition: dict.h:73
#define PNG_FILTER_VALUE_PAETH
Definition: png.h:44
enum AVDiscard skip_frame
Skip decoding for selected frames.
Definition: avcodec.h:1997
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
static int percent_missing(PNGDecContext *s)
Definition: pngdec.c:323
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init functi...
Definition: internal.h:40
int y_offset
Definition: pngdec.c:65
uint8_t
#define av_cold
Definition: attributes.h:88
#define av_malloc(s)
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:190
#define PNG_COLOR_TYPE_RGB_ALPHA
Definition: png.h:36
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
Stereo 3D type: this structure describes how two videos are packed within a single video surface...
Definition: stereo3d.h:176
void ff_png_filter_row(PNGDSPContext *dsp, uint8_t *dst, int filter_type, uint8_t *src, uint8_t *last, int size, int bpp)
Definition: pngdec.c:252
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1619
#define f(width, name)
Definition: cbs_vp9.c:255
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
Multithreading support functions.
AVCodec ff_apng_decoder
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is st...
Definition: pixfmt.h:205
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:443
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
Definition: avcodec.h:632
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_WL16 uint64_t_TMPL AV_WB64 unsigned int_TMPL AV_RB32
Definition: bytestream.h:91
Public header for CRC hash function implementation.
static int decode_phys_chunk(AVCodecContext *avctx, PNGDecContext *s)
Definition: pngdec.c:612
Structure to hold side data for an AVFrame.
Definition: frame.h:220
uint8_t * data
Definition: packet.h:363
const uint8_t * buffer
Definition: bytestream.h:34
uint32_t tag
Definition: movenc.c:1597
#define ff_dlog(a,...)
AVDictionary * metadata
metadata.
Definition: frame.h:600
static int decode_iccp_chunk(PNGDecContext *s, int length, AVFrame *f)
Definition: pngdec.c:851
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:461
ptrdiff_t size
Definition: opengl_enc.c:100
unsigned int last_row_size
Definition: pngdec.c:85
#define AV_WB16(p, v)
Definition: intreadwrite.h:405
int cur_h
Definition: pngdec.c:63
#define av_log(a,...)
#define FF_CODEC_CAP_ALLOCATE_PROGRESS
Definition: internal.h:75
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:401
static int decode_plte_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length)
Definition: pngdec.c:780
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
Definition: bytestream.h:158
#define U(x)
Definition: vp56_arith.h:37
#define src
Definition: vp8dsp.c:255
int width
Definition: frame.h:372
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
static const uint8_t png_pass_dsp_mask[NB_PASSES]
Definition: pngdec.c:109
int flags
Additional information about the frame packing.
Definition: stereo3d.h:185
16 bits gray, 16 bits alpha (big-endian)
Definition: pixfmt.h:212
#define AV_BPRINT_SIZE_UNLIMITED
void ff_thread_release_buffer(AVCodecContext *avctx, ThreadFrame *f)
Wrapper around release_buffer() frame-for multithreaded codecs.
static int decode_frame_common(AVCodecContext *avctx, PNGDecContext *s, AVFrame *p, AVPacket *avpkt)
Definition: pngdec.c:1181
static const uint16_t mask[17]
Definition: lzw.c:38
#define OP_SUB(x, s, l)
static av_always_inline void bytestream2_skip(GetByteContext *g, unsigned int size)
Definition: bytestream.h:168
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
static void handle_p_frame_png(PNGDecContext *s, AVFrame *p)
Definition: pngdec.c:1048
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:115
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:1787
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
Definition: bprint.c:69
uint8_t * crow_buf
Definition: pngdec.c:83
const char * r
Definition: vf_curves.c:116
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:215
int pass
Definition: pngdec.c:90
int ff_png_get_nb_channels(int color_type)
Definition: png.c:49
ThreadFrame picture
Definition: pngdec.c:58
int height
Definition: pngdec.c:62
#define av_fourcc2str(fourcc)
Definition: avutil.h:348
#define PNGSIG
Definition: png.h:49
simple assert() macros that are a bit more flexible than ISO C assert().
GLsizei GLsizei * length
Definition: opengl_enc.c:114
const char * name
Name of the codec implementation.
Definition: codec.h:197
AV_RL24 (one of the AV_RB/AV_RL/AV_WB/AV_WL bytestream access macros)
Definition: bytestream.h:91
int bits_per_pixel
Definition: pngdec.c:75
GetByteContext gb
Definition: pngdec.c:56
#define FFMAX(a, b)
Definition: common.h:103
#define NB_PASSES
Definition: png.h:47
#define fail()
Definition: checkasm.h:133
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:106
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
uint8_t blend_op
Definition: pngdec.c:67
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:369
#define ONLY_IF_THREADS_ENABLED(x)
Define a function with only the non-default version specified.
Definition: internal.h:154
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:33
Frame-threading notes: the pkt_dts and pkt_pts fields in AVFrame will work as usual. Codecs whose streams don't reset across frames will not work, because their bitstreams cannot be decoded in parallel. If the codec calls ff_thread_report_progress(), set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec.caps_internal and use ff_thread_get_buffer() to allocate frames; the frames must then be freed with ff_thread_release_buffer(). Otherwise decode directly into the user-supplied frames. Call ff_thread_report_progress() after some part of the current picture has been decoded; a good place to put this is where draw_horiz_band() is called - add this if it isn't called anywhere.
z_stream zstream
Definition: pngdec.c:95
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx)
Check if the given dimension of an image is valid, meaning that all bytes of the image can be address...
Definition: imgutils.c:317
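Hedged sketch of how av_image_check_size() is typically used before allocating image buffers (hypothetical helper, not from pngdec.c):

#include <libavutil/imgutils.h>

static int validate_dimensions(void *log_ctx, unsigned w, unsigned h)
{
    /* Fails if an image of w x h cannot be addressed safely in a buffer. */
    int ret = av_image_check_size(w, h, 0, log_ctx);
    return ret < 0 ? ret : 0;
}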
#define b
Definition: input.c:41
AVMasteringDisplayMetadata * av_mastering_display_metadata_create_side_data(AVFrame *frame)
Allocate a complete AVMasteringDisplayMetadata and add it to the frame.
enum AVPictureType pict_type
Picture type of the frame.
Definition: frame.h:397
alias for AV_PIX_FMT_YA8
Definition: pixfmt.h:146
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:1640
#define FF_THREAD_FRAME
Decode more than one frame at once.
Definition: avcodec.h:1779
#define FFMIN(a, b)
Definition: common.h:105
#define PNG_FILTER_VALUE_SUB
Definition: png.h:41
uint32_t palette[256]
Definition: pngdec.c:82
#define AV_DICT_DONT_STRDUP_VAL
Take ownership of a value that's been allocated with av_malloc() or another memory allocation functio...
Definition: dict.h:76
#define PNG_COLOR_TYPE_GRAY
Definition: png.h:33
Frame-threading notes (fragment): ...as well as code calling up to before the decode process starts. Call ff_thread_finish_setup() afterwards. If some code can't be moved, ...
uint8_t w
Definition: llviddspenc.c:39
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
uint32_t av_crc(const AVCRC *ctx, uint32_t crc, const uint8_t *buffer, size_t length)
Calculate the CRC of a block.
Definition: crc.c:392
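Hedged sketch of a CRC computation with av_crc()/av_crc_get_table(); this is generic usage, and the actual chunk CRC handling in pngdec.c may differ:

#include <stddef.h>
#include <stdint.h>
#include <libavutil/crc.h>

static uint32_t crc32_of(const uint8_t *buf, size_t len)
{
    const AVCRC *table = av_crc_get_table(AV_CRC_32_IEEE_LE);
    return av_crc(table, 0, buf, len);   /* running CRC starts at 0 */
}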
#define s(width, name)
Definition: cbs_vp9.c:257
uint8_t * last_row
Definition: pngdec.c:84
The data contains an ICC profile as an opaque octet buffer following the format described by ISO 1507...
Definition: frame.h:143
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: avcodec.h:1651
AVCodecContext * avctx
Definition: pngdec.c:54
void av_bprint_get_buffer(AVBPrint *buf, unsigned size, unsigned char **mem, unsigned *actual_size)
Allocate bytes in the buffer for external use.
Definition: bprint.c:218
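Hedged sketch of the AVBPrint helpers referenced in this index (av_bprint_init() and related calls); the helper below is hypothetical and simplified:

#include <libavutil/bprint.h>

static char *format_dimensions(int w, int h)
{
    AVBPrint bp;
    char *str = NULL;

    av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED);  /* grows as needed */
    av_bprintf(&bp, "%dx%d", w, h);
    if (av_bprint_finalize(&bp, &str) < 0)             /* hand string to caller */
        return NULL;
    return str;                                        /* free with av_free() */
}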
av_cold void ff_pngdsp_init(PNGDSPContext *dsp)
Definition: pngdsp.c:43
static int decode_zbuf(AVBPrint *bp, const uint8_t *data, const uint8_t *data_end)
Definition: pngdec.c:444
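decode_zbuf() inflates a zlib stream found inside a chunk. A simplified, self-contained sketch of that idea, using a fixed output buffer instead of an AVBPrint (not the actual implementation):

#include <string.h>
#include <stdint.h>
#include <zlib.h>

static int inflate_to_buf(uint8_t *dst, size_t dst_size,
                          const uint8_t *src, size_t src_size)
{
    z_stream zs;
    int ret;

    memset(&zs, 0, sizeof(zs));
    if (inflateInit(&zs) != Z_OK)
        return -1;

    zs.next_in   = (Bytef *)src;
    zs.avail_in  = (uInt)src_size;
    zs.next_out  = dst;
    zs.avail_out = (uInt)dst_size;

    ret = inflate(&zs, Z_FINISH);
    inflateEnd(&zs);

    /* Number of decompressed bytes, or -1 on error/truncation. */
    return ret == Z_STREAM_END ? (int)(dst_size - zs.avail_out) : -1;
}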
int channels
Definition: pngdec.c:74
int ff_thread_ref_frame(ThreadFrame *dst, const ThreadFrame *src)
Definition: utils.c:1907
Full range content.
Definition: pixfmt.h:586
if(ret)
static int decode_ihdr_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length)
Definition: pngdec.c:560
static uint8_t * iso88591_to_utf8(const uint8_t *in, size_t size_in)
Definition: pngdec.c:488
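iso88591_to_utf8() widens Latin-1 text (e.g. tEXt chunk contents) to UTF-8. A hedged sketch of the general technique, using plain malloc() rather than the FFmpeg allocators:

#include <stdint.h>
#include <stdlib.h>
#include <stddef.h>

static char *latin1_to_utf8(const uint8_t *in, size_t len)
{
    /* Worst case every byte becomes two bytes, plus a terminator. */
    char *out = malloc(2 * len + 1), *p = out;
    if (!out)
        return NULL;
    for (size_t i = 0; i < len; i++) {
        if (in[i] < 0x80) {
            *p++ = (char)in[i];
        } else {
            *p++ = (char)(0xC0 | (in[i] >> 6));   /* lead byte  */
            *p++ = (char)(0x80 | (in[i] & 0x3F)); /* trail byte */
        }
    }
    *p = 0;
    return out;
}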
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:62
static av_always_inline int bytestream2_tell(GetByteContext *g)
Definition: bytestream.h:192
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:387
static av_cold int png_dec_init(AVCodecContext *avctx)
Definition: pngdec.c:1633
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:180
int discard_damaged_percentage
The percentage of damaged samples to discard a frame.
Definition: avcodec.h:2319
Libavcodec external API header.
enum PNGHeaderState hdr_state
Definition: pngdec.c:60
int buffer_size
Definition: pngdec.c:89
static int skip_tag(AVIOContext *in, int32_t tag_name)
Definition: ismindex.c:132
enum AVCodecID codec_id
Definition: avcodec.h:541
#define PNG_FILTER_VALUE_UP
Definition: png.h:42
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:345
#define PNG_FILTER_TYPE_LOCO
Definition: png.h:39
uint8_t last_dispose_op
Definition: pngdec.c:68
#define abs(x)
Definition: cuda_runtime.h:35
static AVRational av_make_q(int num, int den)
Create an AVRational.
Definition: rational.h:71
int debug
debug
Definition: avcodec.h:1618
main external API structure.
Definition: avcodec.h:531
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:556
uint8_t * data
Definition: frame.h:222
int interlace_type
Definition: pngdec.c:72
PNGImageState
Definition: pngdec.c:47
Frame-threading notes (fragment): ...have update_thread_context() run it in the next thread. Add AV_CODEC_CAP_FRAME_THREADS to the codec capabilities. There will be very little speed gain at this point, but it should work. If there are inter-frame dependencies, ...
const uint8_t ff_png_pass_ymask[NB_PASSES]
Definition: png.c:25
int image_linesize
Definition: pngdec.c:81
int extradata_size
Definition: avcodec.h:633
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
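Hedged sketch of av_dict_set() with AV_DICT_DONT_STRDUP_VAL, the pattern used when handing an av_malloc()ed string to a metadata dictionary (hypothetical helper):

#include <libavutil/dict.h>

static int set_owned_metadata(AVDictionary **meta, const char *key, char *owned_value)
{
    /* The dictionary takes ownership of owned_value instead of copying it. */
    return av_dict_set(meta, key, owned_value, AV_DICT_DONT_STRDUP_VAL);
}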
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, int size)
Add a new side data to a frame.
Definition: frame.c:726
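Hedged sketch of attaching side data with av_frame_new_side_data(); the ICC-profile type is chosen only because it appears elsewhere in this index, and the helper is hypothetical:

#include <string.h>
#include <libavutil/error.h>
#include <libavutil/frame.h>

static int attach_icc_profile(AVFrame *frame, const uint8_t *profile, int size)
{
    AVFrameSideData *sd =
        av_frame_new_side_data(frame, AV_FRAME_DATA_ICC_PROFILE, size);
    if (!sd)
        return AVERROR(ENOMEM);
    memcpy(sd->data, profile, size);   /* side data buffer holds size bytes */
    return 0;
}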
#define FF_COMPLIANCE_NORMAL
Definition: avcodec.h:1599
Y , 16bpp, big-endian.
Definition: pixfmt.h:97
static void inflate(uint8_t *dst, const uint8_t *p1, int width, int threshold, const uint8_t *coordinates[], int coord, int maxc)
Definition: vf_neighbor.c:198
Rational number (pair of numerator and denominator).
Definition: rational.h:58
Mastering display metadata capable of representing the color volume of the display used to master the...
int cur_w
Definition: pngdec.c:63
uint8_t transparent_color_be[6]
Definition: pngdec.c:78
#define OP_AVG(x, s, l)
#define AV_EF_IGNORE_ERR
ignore errors and continue
Definition: avcodec.h:1653
#define AV_EF_CRCCHECK
Verify checksums embedded in the bitstream (could be of either encoded or decoded data...
Definition: avcodec.h:1648
uint8_t * image_buf
Definition: pngdec.c:80
uint8_t dispose_op
Definition: pngdec.c:67
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
uint8_t pixel
Definition: tiny_ssim.c:42
void avpriv_report_missing_feature(void *avc, const char *msg,...) av_printf_format(2
Log a generic warning message about a missing feature.
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
Definition: frame.c:611
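Hedged sketch of av_frame_make_writable() before editing frame data in place (hypothetical helper):

#include <libavutil/frame.h>

static int clear_first_pixel(AVFrame *frame)
{
    int ret = av_frame_make_writable(frame);   /* copies only if buffers are shared */
    if (ret < 0)
        return ret;
    if (frame->data[0])
        frame->data[0][0] = 0;                 /* now safe to modify in place */
    return 0;
}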
int last_x_offset
Definition: pngdec.c:66
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:328
#define FAST_DIV255(x)
Definition: pngdec.c:1066
static int handle_p_frame_apng(AVCodecContext *avctx, PNGDecContext *s, AVFrame *p)
Definition: pngdec.c:1068
#define YUV2RGB(NAME, TYPE)
Definition: pngdec.c:309
const AVCRC * av_crc_get_table(AVCRCId crc_id)
Get an initialized standard CRC table.
Definition: crc.c:374
static const uint8_t png_pass_mask[NB_PASSES]
Definition: pngdec.c:99
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb...
Definition: pixfmt.h:76
Y , 8bpp.
Definition: pixfmt.h:74
static av_cold int png_dec_end(AVCodecContext *avctx)
Definition: pngdec.c:1653
void(* add_paeth_prediction)(uint8_t *dst, uint8_t *src, uint8_t *top, int w, int bpp)
Definition: pngdsp.h:33
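add_paeth_prediction() applies the Paeth filter from the PNG specification. The predictor itself is shown here as a standalone sketch; the DSP code operates on whole rows and may be SIMD-accelerated:

#include <stdlib.h>

/* a = left, b = above, c = upper-left: pick the neighbour closest to a + b - c. */
static int paeth_predict(int a, int b, int c)
{
    int p  = a + b - c;
    int pa = abs(p - a);
    int pb = abs(p - b);
    int pc = abs(p - c);

    if (pa <= pb && pa <= pc)
        return a;
    if (pb <= pc)
        return b;
    return c;
}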
common internal api header.
static void handle_small_bpp(PNGDecContext *s, AVFrame *p)
Definition: pngdec.c:896
#define FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
The decoder extracts and fills its parameters even if the frame is skipped due to the skip_frame sett...
Definition: internal.h:60
#define PNG_FILTER_VALUE_NONE
Definition: png.h:40
static int decode_trns_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length)
Definition: pngdec.c:803
packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:102
int last_w
Definition: pngdec.c:64
void av_fast_padded_mallocz(void *ptr, unsigned int *size, size_t min_size)
Same behaviour as av_fast_padded_malloc(), except that the buffer will always be 0-initialized after the call.
Definition: utils.c:84
static const uint8_t png_pass_dsp_ymask[NB_PASSES]
Definition: pngdec.c:104
Stereoscopic video.
int den
Denominator.
Definition: rational.h:60
void ff_png_zfree(void *opaque, void *ptr)
Definition: png.c:44
void * priv_data
Definition: avcodec.h:558
static int png_decode_idat(PNGDecContext *s, int length)
Definition: pngdec.c:414
uint8_t * buffer
Definition: pngdec.c:88
#define av_free(p)
#define FF_DEBUG_STARTCODE
Definition: avcodec.h:1626
int key_frame
1 -> keyframe, 0 -> not
Definition: frame.h:392
int row_size
Definition: pngdec.c:92
APNG common header.
PNGDSPContext dsp
Definition: pngdec.c:53
int compression_type
Definition: pngdec.c:71
int last_h
Definition: pngdec.c:64
int ff_png_pass_row_size(int pass, int bits_per_pixel, int width)
Definition: png.c:62
int height
Definition: frame.h:372
FILE * out
Definition: movenc.c:54
int bit_depth
Definition: pngdec.c:69
#define av_freep(p)
int color_type
Definition: pngdec.c:70
ThreadFrame last_picture
Definition: pngdec.c:57
#define av_malloc_array(a, b)
static void png_put_interlaced_row(uint8_t *dst, int width, int bits_per_pixel, int pass, int color_type, const uint8_t *src)
Definition: pngdec.c:116
#define FFSWAP(type, a, b)
Definition: common.h:108
int crow_size
Definition: pngdec.c:91
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2489
Filter format negotiation notes: the word "frame" indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Each input and each output carries a list of supported formats; for video that means pixel format, for audio that means channel layout and sample format. The lists are references to shared objects: when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and when a single format is eventually chosen for a link amongst the remaining ones, all references to the list are updated. That means that if a filter requires that its input and output have the same format amongst a supported list, all it has to do is use a reference to the same list of formats. query_formats() can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later; that can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references, ownership and permissions...
int x_offset
Definition: pngdec.c:65
#define MKTAG(a, b, c, d)
Definition: common.h:478
void * ff_png_zalloc(void *opaque, unsigned int items, unsigned int size)
Definition: png.c:39
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:57
AV_RL32 (one of the AV_RB/AV_RL/AV_WB/AV_WL bytestream access macros)
Definition: bytestream.h:91
This structure stores compressed data.
Definition: packet.h:340
int has_trns
Definition: pngdec.c:77
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:514
mode
Use these values in ebur128_init (or'ed).
Definition: ebur128.h:83
uint32_t AVCRC
Definition: crc.h:47
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:50
int strict_std_compliance
strictly follow the standard (MPEG-4, ...).
Definition: avcodec.h:1596
AVCodec ff_png_decoder
int i
Definition: input.c:407
#define UNROLL_FILTER(op)
Definition: pngdec.c:237
#define av_mod_uintp2
Definition: common.h:149
#define MNGSIG
Definition: png.h:50