FFmpeg
pngdec.c
1 /*
2  * PNG image format
3  * Copyright (c) 2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 //#define DEBUG
23 
24 #include "libavutil/avassert.h"
25 #include "libavutil/bprint.h"
26 #include "libavutil/crc.h"
27 #include "libavutil/imgutils.h"
28 #include "libavutil/intreadwrite.h"
29 #include "libavutil/stereo3d.h"
30 #include "libavutil/mastering_display_metadata.h"
31 
32 #include "avcodec.h"
33 #include "bytestream.h"
34 #include "internal.h"
35 #include "apng.h"
36 #include "png.h"
37 #include "pngdsp.h"
38 #include "thread.h"
39 
40 #include <zlib.h>
41 
42 enum PNGHeaderState {
43  PNG_IHDR = 1 << 0,
44  PNG_PLTE = 1 << 1,
45 };
46 
47 enum PNGImageState {
48  PNG_IDAT = 1 << 0,
49  PNG_ALLIMAGE = 1 << 1,
50 };
51 
52 typedef struct PNGDecContext {
53  PNGDSPContext dsp;
54  AVCodecContext *avctx;
55 
56  GetByteContext gb;
57  ThreadFrame previous_picture;
58  ThreadFrame last_picture;
59  ThreadFrame picture;
60 
61  enum PNGHeaderState hdr_state;
62  enum PNGImageState pic_state;
63  int width, height;
64  int cur_w, cur_h;
65  int last_w, last_h;
66  int x_offset, y_offset;
67  int last_x_offset, last_y_offset;
68  uint8_t dispose_op, blend_op;
69  uint8_t last_dispose_op;
70  int bit_depth;
71  int color_type;
72  int compression_type;
73  int interlace_type;
74  int filter_type;
75  int channels;
76  int bits_per_pixel;
77  int bpp;
78  int has_trns;
79  uint8_t transparent_color_be[6];
80 
81  uint8_t *image_buf;
82  int image_linesize;
83  uint32_t palette[256];
84  uint8_t *crow_buf;
85  uint8_t *last_row;
86  unsigned int last_row_size;
87  uint8_t *tmp_row;
88  unsigned int tmp_row_size;
89  uint8_t *buffer;
90  int buffer_size;
91  int pass;
92  int crow_size; /* compressed row size (include filter type) */
93  int row_size; /* decompressed row size */
94  int pass_row_size; /* decompress row size of the current pass */
95  int y;
96  z_stream zstream;
97 } PNGDecContext;
98 
99 /* Mask to determine which pixels are valid in a pass */
100 static const uint8_t png_pass_mask[NB_PASSES] = {
101  0x01, 0x01, 0x11, 0x11, 0x55, 0x55, 0xff,
102 };
103 
104 /* Mask to determine which y pixels can be written in a pass */
105 static const uint8_t png_pass_dsp_ymask[NB_PASSES] = {
106  0xff, 0xff, 0x0f, 0xff, 0x33, 0xff, 0x55,
107 };
108 
109 /* Mask to determine which pixels to overwrite while displaying */
110 static const uint8_t png_pass_dsp_mask[NB_PASSES] = {
111  0xff, 0x0f, 0xff, 0x33, 0xff, 0x55, 0xff
112 };
113 
114 /* NOTE: we try to construct a good looking image at each pass. width
115  * is the original image width. We also do pixel format conversion at
116  * this stage */
117 static void png_put_interlaced_row(uint8_t *dst, int width,
118  int bits_per_pixel, int pass,
119  int color_type, const uint8_t *src)
120 {
121  int x, mask, dsp_mask, j, src_x, b, bpp;
122  uint8_t *d;
123  const uint8_t *s;
124 
125  mask = png_pass_mask[pass];
126  dsp_mask = png_pass_dsp_mask[pass];
127 
128  switch (bits_per_pixel) {
129  case 1:
130  src_x = 0;
131  for (x = 0; x < width; x++) {
132  j = (x & 7);
133  if ((dsp_mask << j) & 0x80) {
134  b = (src[src_x >> 3] >> (7 - (src_x & 7))) & 1;
135  dst[x >> 3] &= 0xFF7F>>j;
136  dst[x >> 3] |= b << (7 - j);
137  }
138  if ((mask << j) & 0x80)
139  src_x++;
140  }
141  break;
142  case 2:
143  src_x = 0;
144  for (x = 0; x < width; x++) {
145  int j2 = 2 * (x & 3);
146  j = (x & 7);
147  if ((dsp_mask << j) & 0x80) {
148  b = (src[src_x >> 2] >> (6 - 2*(src_x & 3))) & 3;
149  dst[x >> 2] &= 0xFF3F>>j2;
150  dst[x >> 2] |= b << (6 - j2);
151  }
152  if ((mask << j) & 0x80)
153  src_x++;
154  }
155  break;
156  case 4:
157  src_x = 0;
158  for (x = 0; x < width; x++) {
159  int j2 = 4*(x&1);
160  j = (x & 7);
161  if ((dsp_mask << j) & 0x80) {
162  b = (src[src_x >> 1] >> (4 - 4*(src_x & 1))) & 15;
163  dst[x >> 1] &= 0xFF0F>>j2;
164  dst[x >> 1] |= b << (4 - j2);
165  }
166  if ((mask << j) & 0x80)
167  src_x++;
168  }
169  break;
170  default:
171  bpp = bits_per_pixel >> 3;
172  d = dst;
173  s = src;
174  for (x = 0; x < width; x++) {
175  j = x & 7;
176  if ((dsp_mask << j) & 0x80) {
177  memcpy(d, s, bpp);
178  }
179  d += bpp;
180  if ((mask << j) & 0x80)
181  s += bpp;
182  }
183  break;
184  }
185 }
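/* Standalone illustration (not part of pngdec.c): how the Adam7 pass masks
 * above are consumed. For a pixel at column x, bit (7 - (x & 7)) of
 * png_pass_mask says whether this pass carries a sample for x, and the same
 * bit of png_pass_dsp_mask says whether the preview pass may overwrite x.
 * The sketch below prints both sets for an 8-pixel tile and only assumes
 * NB_PASSES == 7 as defined in png.h. */
#include <stdio.h>

static const unsigned char demo_pass_mask[7]     = { 0x01, 0x01, 0x11, 0x11, 0x55, 0x55, 0xff };
static const unsigned char demo_pass_dsp_mask[7] = { 0xff, 0x0f, 0xff, 0x33, 0xff, 0x55, 0xff };

int main(void)
{
    for (int pass = 0; pass < 7; pass++) {
        printf("pass %d: sampled x%%8:", pass);
        for (int x = 0; x < 8; x++)
            if ((demo_pass_mask[pass] << (x & 7)) & 0x80)
                printf(" %d", x);
        printf("  painted x%%8:");
        for (int x = 0; x < 8; x++)
            if ((demo_pass_dsp_mask[pass] << (x & 7)) & 0x80)
                printf(" %d", x);
        printf("\n");
    }
    return 0;
}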
186 
187 void ff_add_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
188  int w, int bpp)
189 {
190  int i;
191  for (i = 0; i < w; i++) {
192  int a, b, c, p, pa, pb, pc;
193 
194  a = dst[i - bpp];
195  b = top[i];
196  c = top[i - bpp];
197 
198  p = b - c;
199  pc = a - c;
200 
201  pa = abs(p);
202  pb = abs(pc);
203  pc = abs(p + pc);
204 
205  if (pa <= pb && pa <= pc)
206  p = a;
207  else if (pb <= pc)
208  p = b;
209  else
210  p = c;
211  dst[i] = p + src[i];
212  }
213 }
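/* Standalone reference for comparison (not part of pngdec.c): the Paeth
 * predictor as written in the PNG specification. ff_add_png_paeth_prediction()
 * above selects the same value with fewer operations, since for
 * p = a + b - c we have |p - a| = |b - c|, |p - b| = |a - c| and
 * |p - c| = |a + b - 2c|. */
#include <stdlib.h>

static int paeth_predict(int a, int b, int c) /* a = left, b = up, c = up-left */
{
    int p  = a + b - c;
    int pa = abs(p - a);
    int pb = abs(p - b);
    int pc = abs(p - c);

    if (pa <= pb && pa <= pc)
        return a;
    if (pb <= pc)
        return b;
    return c;
}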
214 
215 #define UNROLL1(bpp, op) \
216  { \
217  r = dst[0]; \
218  if (bpp >= 2) \
219  g = dst[1]; \
220  if (bpp >= 3) \
221  b = dst[2]; \
222  if (bpp >= 4) \
223  a = dst[3]; \
224  for (; i <= size - bpp; i += bpp) { \
225  dst[i + 0] = r = op(r, src[i + 0], last[i + 0]); \
226  if (bpp == 1) \
227  continue; \
228  dst[i + 1] = g = op(g, src[i + 1], last[i + 1]); \
229  if (bpp == 2) \
230  continue; \
231  dst[i + 2] = b = op(b, src[i + 2], last[i + 2]); \
232  if (bpp == 3) \
233  continue; \
234  dst[i + 3] = a = op(a, src[i + 3], last[i + 3]); \
235  } \
236  }
237 
238 #define UNROLL_FILTER(op) \
239  if (bpp == 1) { \
240  UNROLL1(1, op) \
241  } else if (bpp == 2) { \
242  UNROLL1(2, op) \
243  } else if (bpp == 3) { \
244  UNROLL1(3, op) \
245  } else if (bpp == 4) { \
246  UNROLL1(4, op) \
247  } \
248  for (; i < size; i++) { \
249  dst[i] = op(dst[i - bpp], src[i], last[i]); \
250  }
251 
252 /* NOTE: 'dst' can be equal to 'last' */
253 static void png_filter_row(PNGDSPContext *dsp, uint8_t *dst, int filter_type,
254  uint8_t *src, uint8_t *last, int size, int bpp)
255 {
256  int i, p, r, g, b, a;
257 
258  switch (filter_type) {
259  case PNG_FILTER_VALUE_NONE:
260  memcpy(dst, src, size);
261  break;
262  case PNG_FILTER_VALUE_SUB:
263  for (i = 0; i < bpp; i++)
264  dst[i] = src[i];
265  if (bpp == 4) {
266  p = *(int *)dst;
267  for (; i < size; i += bpp) {
268  unsigned s = *(int *)(src + i);
269  p = ((s & 0x7f7f7f7f) + (p & 0x7f7f7f7f)) ^ ((s ^ p) & 0x80808080);
270  *(int *)(dst + i) = p;
271  }
272  } else {
273 #define OP_SUB(x, s, l) ((x) + (s))
274  UNROLL_FILTER(OP_SUB);
275  }
276  break;
277  case PNG_FILTER_VALUE_UP:
278  dsp->add_bytes_l2(dst, src, last, size);
279  break;
280  case PNG_FILTER_VALUE_AVG:
281  for (i = 0; i < bpp; i++) {
282  p = (last[i] >> 1);
283  dst[i] = p + src[i];
284  }
285 #define OP_AVG(x, s, l) (((((x) + (l)) >> 1) + (s)) & 0xff)
286  UNROLL_FILTER(OP_AVG);
287  break;
288  case PNG_FILTER_VALUE_PAETH:
289  for (i = 0; i < bpp; i++) {
290  p = last[i];
291  dst[i] = p + src[i];
292  }
293  if (bpp > 2 && size > 4) {
294  /* would write off the end of the array if we let it process
295  * the last pixel with bpp=3 */
296  int w = (bpp & 3) ? size - 3 : size;
297 
298  if (w > i) {
299  dsp->add_paeth_prediction(dst + i, src + i, last + i, size - i, bpp);
300  i = w;
301  }
302  }
303  ff_add_png_paeth_prediction(dst + i, src + i, last + i, size - i, bpp);
304  break;
305  }
306 }
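/* Plain per-byte form of the reconstruction that the UNROLL* macros above
 * specialize (standalone sketch, not part of pngdec.c). x is the already
 * reconstructed byte bpp positions to the left, s the filtered input byte and
 * l the reconstructed byte directly above. */
static unsigned char recon_sub(unsigned char x, unsigned char s, unsigned char l)
{
    (void)l;
    return (unsigned char)(x + s);              /* matches OP_SUB above */
}

static unsigned char recon_avg(unsigned char x, unsigned char s, unsigned char l)
{
    return (unsigned char)(((x + l) >> 1) + s); /* matches OP_AVG above */
}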
307 
308 /* This used to be called "deloco" in FFmpeg
309  * and is actually an inverse reversible colorspace transformation */
310 #define YUV2RGB(NAME, TYPE) \
311 static void deloco_ ## NAME(TYPE *dst, int size, int alpha) \
312 { \
313  int i; \
314  for (i = 0; i < size; i += 3 + alpha) { \
315  int g = dst [i + 1]; \
316  dst[i + 0] += g; \
317  dst[i + 2] += g; \
318  } \
319 }
320 
321 YUV2RGB(rgb8, uint8_t)
322 YUV2RGB(rgb16, uint16_t)
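/* Standalone sketch of the LOCO colour transform that deloco_rgb8()/rgb16()
 * above undo (not part of pngdec.c): the encoder stores R-G and B-G instead
 * of R and B (modulo the sample range), which typically decorrelates the
 * colour channels before the row filters are applied. */
static void loco_forward_rgb8(unsigned char *px)  /* px = { R, G, B } */
{
    px[0] -= px[1];   /* store R - G */
    px[2] -= px[1];   /* store B - G */
}

static void loco_inverse_rgb8(unsigned char *px)  /* mirrors deloco_rgb8() */
{
    px[0] += px[1];
    px[2] += px[1];
}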
323 
324 static int percent_missing(PNGDecContext *s)
325 {
326  if (s->interlace_type) {
327  return 100 - 100 * s->pass / (NB_PASSES - 1);
328  } else {
329  return 100 - 100 * s->y / s->cur_h;
330  }
331 }
332 
333 /* process exactly one decompressed row */
334 static void png_handle_row(PNGDecContext *s)
335 {
336  uint8_t *ptr, *last_row;
337  int got_line;
338 
339  if (!s->interlace_type) {
340  ptr = s->image_buf + s->image_linesize * (s->y + s->y_offset) + s->x_offset * s->bpp;
341  if (s->y == 0)
342  last_row = s->last_row;
343  else
344  last_row = ptr - s->image_linesize;
345 
346  png_filter_row(&s->dsp, ptr, s->crow_buf[0], s->crow_buf + 1,
347  last_row, s->row_size, s->bpp);
348  /* loco lags by 1 row so that it doesn't interfere with top prediction */
349  if (s->filter_type == PNG_FILTER_TYPE_LOCO && s->y > 0) {
350  if (s->bit_depth == 16) {
351  deloco_rgb16((uint16_t *)(ptr - s->image_linesize), s->row_size / 2,
352  s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
353  } else {
354  deloco_rgb8(ptr - s->image_linesize, s->row_size,
355  s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
356  }
357  }
358  s->y++;
359  if (s->y == s->cur_h) {
360  s->pic_state |= PNG_ALLIMAGE;
361  if (s->filter_type == PNG_FILTER_TYPE_LOCO) {
362  if (s->bit_depth == 16) {
363  deloco_rgb16((uint16_t *)ptr, s->row_size / 2,
364  s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
365  } else {
366  deloco_rgb8(ptr, s->row_size,
367  s->color_type == PNG_COLOR_TYPE_RGB_ALPHA);
368  }
369  }
370  }
371  } else {
372  got_line = 0;
373  for (;;) {
374  ptr = s->image_buf + s->image_linesize * (s->y + s->y_offset) + s->x_offset * s->bpp;
375  if ((ff_png_pass_ymask[s->pass] << (s->y & 7)) & 0x80) {
376  /* if we already read one row, it is time to stop to
377  * wait for the next one */
378  if (got_line)
379  break;
380  png_filter_row(&s->dsp, s->tmp_row, s->crow_buf[0], s->crow_buf + 1,
381  s->last_row, s->pass_row_size, s->bpp);
382  FFSWAP(uint8_t *, s->last_row, s->tmp_row);
383  FFSWAP(unsigned int, s->last_row_size, s->tmp_row_size);
384  got_line = 1;
385  }
386  if ((png_pass_dsp_ymask[s->pass] << (s->y & 7)) & 0x80) {
387  png_put_interlaced_row(ptr, s->cur_w, s->bits_per_pixel, s->pass,
388  s->color_type, s->last_row);
389  }
390  s->y++;
391  if (s->y == s->cur_h) {
392  memset(s->last_row, 0, s->row_size);
393  for (;;) {
394  if (s->pass == NB_PASSES - 1) {
395  s->pic_state |= PNG_ALLIMAGE;
396  goto the_end;
397  } else {
398  s->pass++;
399  s->y = 0;
400  s->pass_row_size = ff_png_pass_row_size(s->pass,
401  s->bits_per_pixel,
402  s->cur_w);
403  s->crow_size = s->pass_row_size + 1;
404  if (s->pass_row_size != 0)
405  break;
406  /* skip pass if empty row */
407  }
408  }
409  }
410  }
411 the_end:;
412  }
413 }
414 
415 static int png_decode_idat(PNGDecContext *s, int length)
416 {
417  int ret;
418  s->zstream.avail_in = FFMIN(length, bytestream2_get_bytes_left(&s->gb));
419  s->zstream.next_in = s->gb.buffer;
420  bytestream2_skip(&s->gb, length);
421 
422  /* decode one line if possible */
423  while (s->zstream.avail_in > 0) {
424  ret = inflate(&s->zstream, Z_PARTIAL_FLUSH);
425  if (ret != Z_OK && ret != Z_STREAM_END) {
426  av_log(s->avctx, AV_LOG_ERROR, "inflate returned error %d\n", ret);
427  return AVERROR_EXTERNAL;
428  }
429  if (s->zstream.avail_out == 0) {
430  if (!(s->pic_state & PNG_ALLIMAGE)) {
431  png_handle_row(s);
432  }
433  s->zstream.avail_out = s->crow_size;
434  s->zstream.next_out = s->crow_buf;
435  }
436  if (ret == Z_STREAM_END && s->zstream.avail_in > 0) {
438  "%d undecompressed bytes left in buffer\n", s->zstream.avail_in);
439  return 0;
440  }
441  }
442  return 0;
443 }
444 
445 static int decode_zbuf(AVBPrint *bp, const uint8_t *data,
446  const uint8_t *data_end)
447 {
448  z_stream zstream;
449  unsigned char *buf;
450  unsigned buf_size;
451  int ret;
452 
453  zstream.zalloc = ff_png_zalloc;
454  zstream.zfree = ff_png_zfree;
455  zstream.opaque = NULL;
456  if (inflateInit(&zstream) != Z_OK)
457  return AVERROR_EXTERNAL;
458  zstream.next_in = data;
459  zstream.avail_in = data_end - data;
460  av_bprint_init(bp, 0, AV_BPRINT_SIZE_UNLIMITED);
461 
462  while (zstream.avail_in > 0) {
463  av_bprint_get_buffer(bp, 2, &buf, &buf_size);
464  if (buf_size < 2) {
465  ret = AVERROR(ENOMEM);
466  goto fail;
467  }
468  zstream.next_out = buf;
469  zstream.avail_out = buf_size - 1;
470  ret = inflate(&zstream, Z_PARTIAL_FLUSH);
471  if (ret != Z_OK && ret != Z_STREAM_END) {
472  ret = AVERROR_EXTERNAL;
473  goto fail;
474  }
475  bp->len += zstream.next_out - buf;
476  if (ret == Z_STREAM_END)
477  break;
478  }
479  inflateEnd(&zstream);
480  bp->str[bp->len] = 0;
481  return 0;
482 
483 fail:
484  inflateEnd(&zstream);
485  av_bprint_finalize(bp, NULL);
486  return ret;
487 }
488 
489 static uint8_t *iso88591_to_utf8(const uint8_t *in, size_t size_in)
490 {
491  size_t extra = 0, i;
492  uint8_t *out, *q;
493 
494  for (i = 0; i < size_in; i++)
495  extra += in[i] >= 0x80;
496  if (size_in == SIZE_MAX || extra > SIZE_MAX - size_in - 1)
497  return NULL;
498  q = out = av_malloc(size_in + extra + 1);
499  if (!out)
500  return NULL;
501  for (i = 0; i < size_in; i++) {
502  if (in[i] >= 0x80) {
503  *(q++) = 0xC0 | (in[i] >> 6);
504  *(q++) = 0x80 | (in[i] & 0x3F);
505  } else {
506  *(q++) = in[i];
507  }
508  }
509  *(q++) = 0;
510  return out;
511 }
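/* Worked example for the expansion rule above (not part of pngdec.c): every
 * ISO-8859-1 byte b >= 0x80 becomes the two UTF-8 bytes 0xC0 | (b >> 6) and
 * 0x80 | (b & 0x3F), e.g. 0xE9 ("e" with acute accent) becomes 0xC3 0xA9. */
#include <assert.h>

static void check_latin1_to_utf8(void)
{
    unsigned char b  = 0xE9;
    unsigned char hi = 0xC0 | (b >> 6);   /* 0xC3 */
    unsigned char lo = 0x80 | (b & 0x3F); /* 0xA9 */
    assert(hi == 0xC3 && lo == 0xA9);
}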
512 
513 static int decode_text_chunk(PNGDecContext *s, uint32_t length, int compressed,
514  AVDictionary **dict)
515 {
516  int ret, method;
517  const uint8_t *data = s->gb.buffer;
518  const uint8_t *data_end = data + length;
519  const uint8_t *keyword = data;
520  const uint8_t *keyword_end = memchr(keyword, 0, data_end - keyword);
521  uint8_t *kw_utf8 = NULL, *text, *txt_utf8 = NULL;
522  unsigned text_len;
523  AVBPrint bp;
524 
525  if (!keyword_end)
526  return AVERROR_INVALIDDATA;
527  data = keyword_end + 1;
528 
529  if (compressed) {
530  if (data == data_end)
531  return AVERROR_INVALIDDATA;
532  method = *(data++);
533  if (method)
534  return AVERROR_INVALIDDATA;
535  if ((ret = decode_zbuf(&bp, data, data_end)) < 0)
536  return ret;
537  text_len = bp.len;
538  ret = av_bprint_finalize(&bp, (char **)&text);
539  if (ret < 0)
540  return ret;
541  } else {
542  text = (uint8_t *)data;
543  text_len = data_end - text;
544  }
545 
546  kw_utf8 = iso88591_to_utf8(keyword, keyword_end - keyword);
547  txt_utf8 = iso88591_to_utf8(text, text_len);
548  if (text != data)
549  av_free(text);
550  if (!(kw_utf8 && txt_utf8)) {
551  av_free(kw_utf8);
552  av_free(txt_utf8);
553  return AVERROR(ENOMEM);
554  }
555 
556  av_dict_set(dict, kw_utf8, txt_utf8,
557  AV_DICT_DONT_STRDUP_KEY | AV_DICT_DONT_STRDUP_VAL);
558  return 0;
559 }
560 
561 static int decode_ihdr_chunk(AVCodecContext *avctx, PNGDecContext *s,
562  uint32_t length)
563 {
564  if (length != 13)
565  return AVERROR_INVALIDDATA;
566 
567  if (s->pic_state & PNG_IDAT) {
568  av_log(avctx, AV_LOG_ERROR, "IHDR after IDAT\n");
569  return AVERROR_INVALIDDATA;
570  }
571 
572  if (s->hdr_state & PNG_IHDR) {
573  av_log(avctx, AV_LOG_ERROR, "Multiple IHDR\n");
574  return AVERROR_INVALIDDATA;
575  }
576 
577  s->width = s->cur_w = bytestream2_get_be32(&s->gb);
578  s->height = s->cur_h = bytestream2_get_be32(&s->gb);
579  if (av_image_check_size(s->width, s->height, 0, avctx)) {
580  s->cur_w = s->cur_h = s->width = s->height = 0;
581  av_log(avctx, AV_LOG_ERROR, "Invalid image size\n");
582  return AVERROR_INVALIDDATA;
583  }
584  s->bit_depth = bytestream2_get_byte(&s->gb);
585  if (s->bit_depth != 1 && s->bit_depth != 2 && s->bit_depth != 4 &&
586  s->bit_depth != 8 && s->bit_depth != 16) {
587  av_log(avctx, AV_LOG_ERROR, "Invalid bit depth\n");
588  goto error;
589  }
590  s->color_type = bytestream2_get_byte(&s->gb);
591  s->compression_type = bytestream2_get_byte(&s->gb);
592  if (s->compression_type) {
593  av_log(avctx, AV_LOG_ERROR, "Invalid compression method %d\n", s->compression_type);
594  goto error;
595  }
596  s->filter_type = bytestream2_get_byte(&s->gb);
597  s->interlace_type = bytestream2_get_byte(&s->gb);
598  bytestream2_skip(&s->gb, 4); /* crc */
599  s->hdr_state |= PNG_IHDR;
600  if (avctx->debug & FF_DEBUG_PICT_INFO)
601  av_log(avctx, AV_LOG_DEBUG, "width=%d height=%d depth=%d color_type=%d "
602  "compression_type=%d filter_type=%d interlace_type=%d\n",
603  s->width, s->height, s->bit_depth, s->color_type,
604  s->compression_type, s->filter_type, s->interlace_type);
605 
606  return 0;
607 error:
608  s->cur_w = s->cur_h = s->width = s->height = 0;
609  s->bit_depth = 8;
610  return AVERROR_INVALIDDATA;
611 }
612 
613 static int decode_phys_chunk(AVCodecContext *avctx, PNGDecContext *s)
614 {
615  if (s->pic_state & PNG_IDAT) {
616  av_log(avctx, AV_LOG_ERROR, "pHYs after IDAT\n");
617  return AVERROR_INVALIDDATA;
618  }
619  avctx->sample_aspect_ratio.num = bytestream2_get_be32(&s->gb);
620  avctx->sample_aspect_ratio.den = bytestream2_get_be32(&s->gb);
621  if (avctx->sample_aspect_ratio.num < 0 || avctx->sample_aspect_ratio.den < 0)
622  avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
623  bytestream2_skip(&s->gb, 1); /* unit specifier */
624  bytestream2_skip(&s->gb, 4); /* crc */
625 
626  return 0;
627 }
628 
629 static int decode_idat_chunk(AVCodecContext *avctx, PNGDecContext *s,
630  uint32_t length, AVFrame *p)
631 {
632  int ret;
633  size_t byte_depth = s->bit_depth > 8 ? 2 : 1;
634 
635  if (!(s->hdr_state & PNG_IHDR)) {
636  av_log(avctx, AV_LOG_ERROR, "IDAT without IHDR\n");
637  return AVERROR_INVALIDDATA;
638  }
639  if (!(s->pic_state & PNG_IDAT)) {
640  /* init image info */
641  ret = ff_set_dimensions(avctx, s->width, s->height);
642  if (ret < 0)
643  return ret;
644 
645  s->channels = ff_png_get_nb_channels(s->color_type);
646  s->bits_per_pixel = s->bit_depth * s->channels;
647  s->bpp = (s->bits_per_pixel + 7) >> 3;
648  s->row_size = (s->cur_w * s->bits_per_pixel + 7) >> 3;
649 
650  if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
651  s->color_type == PNG_COLOR_TYPE_RGB) {
652  avctx->pix_fmt = AV_PIX_FMT_RGB24;
653  } else if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
654  s->color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
655  avctx->pix_fmt = AV_PIX_FMT_RGBA;
656  } else if ((s->bit_depth == 2 || s->bit_depth == 4 || s->bit_depth == 8) &&
657  s->color_type == PNG_COLOR_TYPE_GRAY) {
658  avctx->pix_fmt = AV_PIX_FMT_GRAY8;
659  } else if (s->bit_depth == 16 &&
660  s->color_type == PNG_COLOR_TYPE_GRAY) {
661  avctx->pix_fmt = AV_PIX_FMT_GRAY16BE;
662  } else if (s->bit_depth == 16 &&
663  s->color_type == PNG_COLOR_TYPE_RGB) {
664  avctx->pix_fmt = AV_PIX_FMT_RGB48BE;
665  } else if (s->bit_depth == 16 &&
666  s->color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
667  avctx->pix_fmt = AV_PIX_FMT_RGBA64BE;
668  } else if ((s->bits_per_pixel == 1 || s->bits_per_pixel == 2 || s->bits_per_pixel == 4 || s->bits_per_pixel == 8) &&
669  s->color_type == PNG_COLOR_TYPE_PALETTE) {
670  avctx->pix_fmt = AV_PIX_FMT_PAL8;
671  } else if (s->bit_depth == 1 && s->bits_per_pixel == 1 && avctx->codec_id != AV_CODEC_ID_APNG) {
672  avctx->pix_fmt = AV_PIX_FMT_MONOBLACK;
673  } else if (s->bit_depth == 8 &&
674  s->color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
675  avctx->pix_fmt = AV_PIX_FMT_YA8;
676  } else if (s->bit_depth == 16 &&
677  s->color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
678  avctx->pix_fmt = AV_PIX_FMT_YA16BE;
679  } else {
680  avpriv_request_sample(avctx,
681  "Bit depth %d color type %d",
682  s->bit_depth, s->color_type);
683  return AVERROR_PATCHWELCOME;
684  }
685 
686  if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE) {
687  switch (avctx->pix_fmt) {
688  case AV_PIX_FMT_RGB24:
689  avctx->pix_fmt = AV_PIX_FMT_RGBA;
690  break;
691 
692  case AV_PIX_FMT_RGB48BE:
693  avctx->pix_fmt = AV_PIX_FMT_RGBA64BE;
694  break;
695 
696  case AV_PIX_FMT_GRAY8:
697  avctx->pix_fmt = AV_PIX_FMT_YA8;
698  break;
699 
700  case AV_PIX_FMT_GRAY16BE:
701  avctx->pix_fmt = AV_PIX_FMT_YA16BE;
702  break;
703 
704  default:
705  avpriv_request_sample(avctx, "bit depth %d "
706  "and color type %d with TRNS",
707  s->bit_depth, s->color_type);
708  return AVERROR_INVALIDDATA;
709  }
710 
711  s->bpp += byte_depth;
712  }
713 
714  if ((ret = ff_thread_get_buffer(avctx, &s->picture, AV_GET_BUFFER_FLAG_REF)) < 0)
715  return ret;
716  if (avctx->codec_id == AV_CODEC_ID_APNG && s->last_dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
717  ff_thread_release_buffer(avctx, &s->previous_picture);
718  if ((ret = ff_thread_get_buffer(avctx, &s->previous_picture, AV_GET_BUFFER_FLAG_REF)) < 0)
719  return ret;
720  }
721  p->pict_type = AV_PICTURE_TYPE_I;
722  p->key_frame = 1;
723  p->interlaced_frame = !!s->interlace_type;
724 
725  ff_thread_finish_setup(avctx);
726 
727  /* compute the compressed row size */
728  if (!s->interlace_type) {
729  s->crow_size = s->row_size + 1;
730  } else {
731  s->pass = 0;
732  s->pass_row_size = ff_png_pass_row_size(s->pass,
733  s->bits_per_pixel,
734  s->cur_w);
735  s->crow_size = s->pass_row_size + 1;
736  }
737  ff_dlog(avctx, "row_size=%d crow_size =%d\n",
738  s->row_size, s->crow_size);
739  s->image_buf = p->data[0];
740  s->image_linesize = p->linesize[0];
741  /* copy the palette if needed */
742  if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
743  memcpy(p->data[1], s->palette, 256 * sizeof(uint32_t));
744  /* empty row is used if differencing to the first row */
745  av_fast_padded_mallocz(&s->last_row, &s->last_row_size, s->row_size);
746  if (!s->last_row)
747  return AVERROR_INVALIDDATA;
748  if (s->interlace_type ||
749  s->color_type == PNG_COLOR_TYPE_RGB_ALPHA) {
750  av_fast_padded_malloc(&s->tmp_row, &s->tmp_row_size, s->row_size);
751  if (!s->tmp_row)
752  return AVERROR_INVALIDDATA;
753  }
754  /* compressed row */
755  av_fast_padded_malloc(&s->buffer, &s->buffer_size, s->row_size + 16);
756  if (!s->buffer)
757  return AVERROR(ENOMEM);
758 
759  /* we want crow_buf+1 to be 16-byte aligned */
760  s->crow_buf = s->buffer + 15;
761  s->zstream.avail_out = s->crow_size;
762  s->zstream.next_out = s->crow_buf;
763  }
764 
765  s->pic_state |= PNG_IDAT;
766 
767  /* set image to non-transparent bpp while decompressing */
768  if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE)
769  s->bpp -= byte_depth;
770 
771  ret = png_decode_idat(s, length);
772 
773  if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE)
774  s->bpp += byte_depth;
775 
776  if (ret < 0)
777  return ret;
778 
779  bytestream2_skip(&s->gb, 4); /* crc */
780 
781  return 0;
782 }
783 
784 static int decode_plte_chunk(AVCodecContext *avctx, PNGDecContext *s,
785  uint32_t length)
786 {
787  int n, i, r, g, b;
788 
789  if ((length % 3) != 0 || length > 256 * 3)
790  return AVERROR_INVALIDDATA;
791  /* read the palette */
792  n = length / 3;
793  for (i = 0; i < n; i++) {
794  r = bytestream2_get_byte(&s->gb);
795  g = bytestream2_get_byte(&s->gb);
796  b = bytestream2_get_byte(&s->gb);
797  s->palette[i] = (0xFFU << 24) | (r << 16) | (g << 8) | b;
798  }
799  for (; i < 256; i++)
800  s->palette[i] = (0xFFU << 24);
801  s->hdr_state |= PNG_PLTE;
802  bytestream2_skip(&s->gb, 4); /* crc */
803 
804  return 0;
805 }
806 
807 static int decode_trns_chunk(AVCodecContext *avctx, PNGDecContext *s,
808  uint32_t length)
809 {
810  int v, i;
811 
812  if (!(s->hdr_state & PNG_IHDR)) {
813  av_log(avctx, AV_LOG_ERROR, "trns before IHDR\n");
814  return AVERROR_INVALIDDATA;
815  }
816 
817  if (s->pic_state & PNG_IDAT) {
818  av_log(avctx, AV_LOG_ERROR, "trns after IDAT\n");
819  return AVERROR_INVALIDDATA;
820  }
821 
822  if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
823  if (length > 256 || !(s->hdr_state & PNG_PLTE))
824  return AVERROR_INVALIDDATA;
825 
826  for (i = 0; i < length; i++) {
827  unsigned v = bytestream2_get_byte(&s->gb);
828  s->palette[i] = (s->palette[i] & 0x00ffffff) | (v << 24);
829  }
830  } else if (s->color_type == PNG_COLOR_TYPE_GRAY || s->color_type == PNG_COLOR_TYPE_RGB) {
831  if ((s->color_type == PNG_COLOR_TYPE_GRAY && length != 2) ||
832  (s->color_type == PNG_COLOR_TYPE_RGB && length != 6) ||
833  s->bit_depth == 1)
834  return AVERROR_INVALIDDATA;
835 
836  for (i = 0; i < length / 2; i++) {
837  /* only use the least significant bits */
838  v = av_mod_uintp2(bytestream2_get_be16(&s->gb), s->bit_depth);
839 
840  if (s->bit_depth > 8)
841  AV_WB16(&s->transparent_color_be[2 * i], v);
842  else
843  s->transparent_color_be[i] = v;
844  }
845  } else {
846  return AVERROR_INVALIDDATA;
847  }
848 
849  bytestream2_skip(&s->gb, 4); /* crc */
850  s->has_trns = 1;
851 
852  return 0;
853 }
854 
855 static int decode_iccp_chunk(PNGDecContext *s, int length, AVFrame *f)
856 {
857  int ret, cnt = 0;
858  uint8_t *data, profile_name[82];
859  AVBPrint bp;
860  AVFrameSideData *sd;
861 
862  while ((profile_name[cnt++] = bytestream2_get_byte(&s->gb)) && cnt < 81);
863  if (cnt > 80) {
864  av_log(s->avctx, AV_LOG_ERROR, "iCCP with invalid name!\n");
865  return AVERROR_INVALIDDATA;
866  }
867 
868  length = FFMAX(length - cnt, 0);
869 
870  if (bytestream2_get_byte(&s->gb) != 0) {
871  av_log(s->avctx, AV_LOG_ERROR, "iCCP with invalid compression!\n");
872  return AVERROR_INVALIDDATA;
873  }
874 
875  length = FFMAX(length - 1, 0);
876 
877  if ((ret = decode_zbuf(&bp, s->gb.buffer, s->gb.buffer + length)) < 0)
878  return ret;
879 
880  ret = av_bprint_finalize(&bp, (char **)&data);
881  if (ret < 0)
882  return ret;
883 
884  sd = av_frame_new_side_data(f, AV_FRAME_DATA_ICC_PROFILE, bp.len);
885  if (!sd) {
886  av_free(data);
887  return AVERROR(ENOMEM);
888  }
889 
890  av_dict_set(&sd->metadata, "name", profile_name, 0);
891  memcpy(sd->data, data, bp.len);
892  av_free(data);
893 
894  /* ICC compressed data and CRC */
895  bytestream2_skip(&s->gb, length + 4);
896 
897  return 0;
898 }
899 
900 static void handle_small_bpp(PNGDecContext *s, AVFrame *p)
901 {
902  if (s->bits_per_pixel == 1 && s->color_type == PNG_COLOR_TYPE_PALETTE) {
903  int i, j, k;
904  uint8_t *pd = p->data[0];
905  for (j = 0; j < s->height; j++) {
906  i = s->width / 8;
907  for (k = 7; k >= 1; k--)
908  if ((s->width&7) >= k)
909  pd[8*i + k - 1] = (pd[i]>>8-k) & 1;
910  for (i--; i >= 0; i--) {
911  pd[8*i + 7]= pd[i] & 1;
912  pd[8*i + 6]= (pd[i]>>1) & 1;
913  pd[8*i + 5]= (pd[i]>>2) & 1;
914  pd[8*i + 4]= (pd[i]>>3) & 1;
915  pd[8*i + 3]= (pd[i]>>4) & 1;
916  pd[8*i + 2]= (pd[i]>>5) & 1;
917  pd[8*i + 1]= (pd[i]>>6) & 1;
918  pd[8*i + 0]= pd[i]>>7;
919  }
920  pd += s->image_linesize;
921  }
922  } else if (s->bits_per_pixel == 2) {
923  int i, j;
924  uint8_t *pd = p->data[0];
925  for (j = 0; j < s->height; j++) {
926  i = s->width / 4;
927  if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
928  if ((s->width&3) >= 3) pd[4*i + 2]= (pd[i] >> 2) & 3;
929  if ((s->width&3) >= 2) pd[4*i + 1]= (pd[i] >> 4) & 3;
930  if ((s->width&3) >= 1) pd[4*i + 0]= pd[i] >> 6;
931  for (i--; i >= 0; i--) {
932  pd[4*i + 3]= pd[i] & 3;
933  pd[4*i + 2]= (pd[i]>>2) & 3;
934  pd[4*i + 1]= (pd[i]>>4) & 3;
935  pd[4*i + 0]= pd[i]>>6;
936  }
937  } else {
938  if ((s->width&3) >= 3) pd[4*i + 2]= ((pd[i]>>2) & 3)*0x55;
939  if ((s->width&3) >= 2) pd[4*i + 1]= ((pd[i]>>4) & 3)*0x55;
940  if ((s->width&3) >= 1) pd[4*i + 0]= ( pd[i]>>6 )*0x55;
941  for (i--; i >= 0; i--) {
942  pd[4*i + 3]= ( pd[i] & 3)*0x55;
943  pd[4*i + 2]= ((pd[i]>>2) & 3)*0x55;
944  pd[4*i + 1]= ((pd[i]>>4) & 3)*0x55;
945  pd[4*i + 0]= ( pd[i]>>6 )*0x55;
946  }
947  }
948  pd += s->image_linesize;
949  }
950  } else if (s->bits_per_pixel == 4) {
951  int i, j;
952  uint8_t *pd = p->data[0];
953  for (j = 0; j < s->height; j++) {
954  i = s->width/2;
955  if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
956  if (s->width&1) pd[2*i+0]= pd[i]>>4;
957  for (i--; i >= 0; i--) {
958  pd[2*i + 1] = pd[i] & 15;
959  pd[2*i + 0] = pd[i] >> 4;
960  }
961  } else {
962  if (s->width & 1) pd[2*i + 0]= (pd[i] >> 4) * 0x11;
963  for (i--; i >= 0; i--) {
964  pd[2*i + 1] = (pd[i] & 15) * 0x11;
965  pd[2*i + 0] = (pd[i] >> 4) * 0x11;
966  }
967  }
968  pd += s->image_linesize;
969  }
970  }
971 }
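/* Worked example for the unpacking above (not part of pngdec.c): palette
 * indices are only widened, while grayscale samples are also rescaled to the
 * full 8-bit range, a 2-bit value by 0x55 and a 4-bit value by 0x11. */
#include <assert.h>

static void check_small_bpp_scaling(void)
{
    assert(0x3 * 0x55 == 0xFF); /* 2-bit maximum -> 255 */
    assert(0x2 * 0x55 == 0xAA);
    assert(0xF * 0x11 == 0xFF); /* 4-bit maximum -> 255 */
}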
972 
973 static int decode_fctl_chunk(AVCodecContext *avctx, PNGDecContext *s,
974  uint32_t length)
975 {
976  uint32_t sequence_number;
977  int cur_w, cur_h, x_offset, y_offset, dispose_op, blend_op;
978 
979  if (length != 26)
980  return AVERROR_INVALIDDATA;
981 
982  if (!(s->hdr_state & PNG_IHDR)) {
983  av_log(avctx, AV_LOG_ERROR, "fctl before IHDR\n");
984  return AVERROR_INVALIDDATA;
985  }
986 
987  s->last_w = s->cur_w;
988  s->last_h = s->cur_h;
989  s->last_x_offset = s->x_offset;
990  s->last_y_offset = s->y_offset;
991  s->last_dispose_op = s->dispose_op;
992 
993  sequence_number = bytestream2_get_be32(&s->gb);
994  cur_w = bytestream2_get_be32(&s->gb);
995  cur_h = bytestream2_get_be32(&s->gb);
996  x_offset = bytestream2_get_be32(&s->gb);
997  y_offset = bytestream2_get_be32(&s->gb);
998  bytestream2_skip(&s->gb, 4); /* delay_num (2), delay_den (2) */
999  dispose_op = bytestream2_get_byte(&s->gb);
1000  blend_op = bytestream2_get_byte(&s->gb);
1001  bytestream2_skip(&s->gb, 4); /* crc */
1002 
1003  if (sequence_number == 0 &&
1004  (cur_w != s->width ||
1005  cur_h != s->height ||
1006  x_offset != 0 ||
1007  y_offset != 0) ||
1008  cur_w <= 0 || cur_h <= 0 ||
1009  x_offset < 0 || y_offset < 0 ||
1010  cur_w > s->width - x_offset|| cur_h > s->height - y_offset)
1011  return AVERROR_INVALIDDATA;
1012 
1013  if (blend_op != APNG_BLEND_OP_OVER && blend_op != APNG_BLEND_OP_SOURCE) {
1014  av_log(avctx, AV_LOG_ERROR, "Invalid blend_op %d\n", blend_op);
1015  return AVERROR_INVALIDDATA;
1016  }
1017 
1018  if ((sequence_number == 0 || !s->previous_picture.f->data[0]) &&
1019  dispose_op == APNG_DISPOSE_OP_PREVIOUS) {
1020  // No previous frame to revert to for the first frame
1021  // Spec says to just treat it as a APNG_DISPOSE_OP_BACKGROUND
1022  dispose_op = APNG_DISPOSE_OP_BACKGROUND;
1023  }
1024 
1025  if (blend_op == APNG_BLEND_OP_OVER && !s->has_trns && (
1026  avctx->pix_fmt == AV_PIX_FMT_RGB24 ||
1027  avctx->pix_fmt == AV_PIX_FMT_RGB48BE ||
1028  avctx->pix_fmt == AV_PIX_FMT_PAL8 ||
1029  avctx->pix_fmt == AV_PIX_FMT_GRAY8 ||
1030  avctx->pix_fmt == AV_PIX_FMT_GRAY16BE ||
1031  avctx->pix_fmt == AV_PIX_FMT_MONOBLACK
1032  )) {
1033  // APNG_BLEND_OP_OVER is the same as APNG_BLEND_OP_SOURCE when there is no alpha channel
1034  blend_op = APNG_BLEND_OP_SOURCE;
1035  }
1036 
1037  s->cur_w = cur_w;
1038  s->cur_h = cur_h;
1039  s->x_offset = x_offset;
1040  s->y_offset = y_offset;
1041  s->dispose_op = dispose_op;
1042  s->blend_op = blend_op;
1043 
1044  return 0;
1045 }
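/* Layout of the 26-byte fcTL payload parsed above, per the APNG specification
 * (all integers big-endian). Shown only as a reference sketch; pngdec.c reads
 * the fields directly with bytestream2_get_be32()/bytestream2_get_byte(). */
#include <stdint.h>

struct apng_fctl_sketch {
    uint32_t sequence_number; /* starts at 0, shared with fdAT chunks */
    uint32_t width, height;   /* dimensions of the following frame */
    uint32_t x_offset, y_offset;
    uint16_t delay_num, delay_den;
    uint8_t  dispose_op;      /* APNG_DISPOSE_OP_NONE/BACKGROUND/PREVIOUS */
    uint8_t  blend_op;        /* APNG_BLEND_OP_SOURCE/OVER */
};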
1046 
1047 static void handle_p_frame_png(PNGDecContext *s, AVFrame *p)
1048 {
1049  int i, j;
1050  uint8_t *pd = p->data[0];
1051  uint8_t *pd_last = s->last_picture.f->data[0];
1052  int ls = FFMIN(av_image_get_linesize(p->format, s->width, 0), s->width * s->bpp);
1053 
1054  ff_thread_await_progress(&s->last_picture, INT_MAX, 0);
1055  for (j = 0; j < s->height; j++) {
1056  for (i = 0; i < ls; i++)
1057  pd[i] += pd_last[i];
1058  pd += s->image_linesize;
1059  pd_last += s->image_linesize;
1060  }
1061 }
1062 
1063 // divide by 255 and round to nearest
1064 // apply a fast variant: (X+127)/255 = ((X+127)*257+257)>>16 = ((X+128)*257)>>16
1065 #define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
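/* Standalone check of the identity behind FAST_DIV255 (not part of pngdec.c):
 * for every product of two 8-bit values, ((x + 128) * 257) >> 16 equals
 * division by 255 with rounding to nearest, i.e. (x + 127) / 255. */
#include <assert.h>

static void check_fast_div255(void)
{
    for (int x = 0; x <= 255 * 255; x++)
        assert(((x + 128) * 257 >> 16) == (x + 127) / 255);
}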
1066 
1067 static int handle_p_frame_apng(AVCodecContext *avctx, PNGDecContext *s,
1068  AVFrame *p)
1069 {
1070  size_t x, y;
1071  uint8_t *buffer;
1072 
1073  if (s->blend_op == APNG_BLEND_OP_OVER &&
1074  avctx->pix_fmt != AV_PIX_FMT_RGBA &&
1075  avctx->pix_fmt != AV_PIX_FMT_GRAY8A &&
1076  avctx->pix_fmt != AV_PIX_FMT_PAL8) {
1077  avpriv_request_sample(avctx, "Blending with pixel format %s",
1078  av_get_pix_fmt_name(avctx->pix_fmt));
1079  return AVERROR_PATCHWELCOME;
1080  }
1081 
1082  buffer = av_malloc_array(s->image_linesize, s->height);
1083  if (!buffer)
1084  return AVERROR(ENOMEM);
1085 
1086 
1087  // Do the disposal operation specified by the last frame on the frame
1088  if (s->last_dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
1089  ff_thread_await_progress(&s->last_picture, INT_MAX, 0);
1090  memcpy(buffer, s->last_picture.f->data[0], s->image_linesize * s->height);
1091 
1092  if (s->last_dispose_op == APNG_DISPOSE_OP_BACKGROUND)
1093  for (y = s->last_y_offset; y < s->last_y_offset + s->last_h; ++y)
1094  memset(buffer + s->image_linesize * y + s->bpp * s->last_x_offset, 0, s->bpp * s->last_w);
1095 
1096  memcpy(s->previous_picture.f->data[0], buffer, s->image_linesize * s->height);
1097  ff_thread_report_progress(&s->previous_picture, INT_MAX, 0);
1098  } else {
1099  ff_thread_await_progress(&s->previous_picture, INT_MAX, 0);
1100  memcpy(buffer, s->previous_picture.f->data[0], s->image_linesize * s->height);
1101  }
1102 
1103  // Perform blending
1104  if (s->blend_op == APNG_BLEND_OP_SOURCE) {
1105  for (y = s->y_offset; y < s->y_offset + s->cur_h; ++y) {
1106  size_t row_start = s->image_linesize * y + s->bpp * s->x_offset;
1107  memcpy(buffer + row_start, p->data[0] + row_start, s->bpp * s->cur_w);
1108  }
1109  } else { // APNG_BLEND_OP_OVER
1110  for (y = s->y_offset; y < s->y_offset + s->cur_h; ++y) {
1111  uint8_t *foreground = p->data[0] + s->image_linesize * y + s->bpp * s->x_offset;
1112  uint8_t *background = buffer + s->image_linesize * y + s->bpp * s->x_offset;
1113  for (x = s->x_offset; x < s->x_offset + s->cur_w; ++x, foreground += s->bpp, background += s->bpp) {
1114  size_t b;
1115  uint8_t foreground_alpha, background_alpha, output_alpha;
1116  uint8_t output[10];
1117 
1118  // Since we might be blending alpha onto alpha, we use the following equations:
1119  // output_alpha = foreground_alpha + (1 - foreground_alpha) * background_alpha
1120  // output = (foreground_alpha * foreground + (1 - foreground_alpha) * background_alpha * background) / output_alpha
1121 
1122  switch (avctx->pix_fmt) {
1123  case AV_PIX_FMT_RGBA:
1124  foreground_alpha = foreground[3];
1125  background_alpha = background[3];
1126  break;
1127 
1128  case AV_PIX_FMT_GRAY8A:
1129  foreground_alpha = foreground[1];
1130  background_alpha = background[1];
1131  break;
1132 
1133  case AV_PIX_FMT_PAL8:
1134  foreground_alpha = s->palette[foreground[0]] >> 24;
1135  background_alpha = s->palette[background[0]] >> 24;
1136  break;
1137  }
1138 
1139  if (foreground_alpha == 0)
1140  continue;
1141 
1142  if (foreground_alpha == 255) {
1143  memcpy(background, foreground, s->bpp);
1144  continue;
1145  }
1146 
1147  if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
1148  // TODO: Alpha blending with PAL8 will likely need the entire image converted over to RGBA first
1149  avpriv_request_sample(avctx, "Alpha blending palette samples");
1150  background[0] = foreground[0];
1151  continue;
1152  }
1153 
1154  output_alpha = foreground_alpha + FAST_DIV255((255 - foreground_alpha) * background_alpha);
1155 
1156  av_assert0(s->bpp <= 10);
1157 
1158  for (b = 0; b < s->bpp - 1; ++b) {
1159  if (output_alpha == 0) {
1160  output[b] = 0;
1161  } else if (background_alpha == 255) {
1162  output[b] = FAST_DIV255(foreground_alpha * foreground[b] + (255 - foreground_alpha) * background[b]);
1163  } else {
1164  output[b] = (255 * foreground_alpha * foreground[b] + (255 - foreground_alpha) * background_alpha * background[b]) / (255 * output_alpha);
1165  }
1166  }
1167  output[b] = output_alpha;
1168  memcpy(background, output, s->bpp);
1169  }
1170  }
1171  }
1172 
1173  // Copy blended buffer into the frame and free
1174  memcpy(p->data[0], buffer, s->image_linesize * s->height);
1175  av_free(buffer);
1176 
1177  return 0;
1178 }
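/* Standalone sketch of the per-channel "over" blend used above, with all
 * samples and alphas in 0..255 (not part of pngdec.c; FAST_DIV255 refers to
 * the macro defined before handle_p_frame_apng()). */
static void blend_over_8bit(int fg, int fg_alpha, int bg, int bg_alpha,
                            int *out, int *out_alpha)
{
    *out_alpha = fg_alpha + FAST_DIV255((255 - fg_alpha) * bg_alpha);
    if (*out_alpha == 0)
        *out = 0;
    else
        *out = (255 * fg_alpha * fg + (255 - fg_alpha) * bg_alpha * bg) /
               (255 * *out_alpha);
}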
1179 
1180 static int decode_frame_common(AVCodecContext *avctx, PNGDecContext *s,
1181  AVFrame *p, AVPacket *avpkt)
1182 {
1183  const AVCRC *crc_tab = av_crc_get_table(AV_CRC_32_IEEE_LE);
1184  AVDictionary **metadatap = NULL;
1185  uint32_t tag, length;
1186  int decode_next_dat = 0;
1187  int i, ret;
1188 
1189  for (;;) {
1190  length = bytestream2_get_bytes_left(&s->gb);
1191  if (length <= 0) {
1192 
1193  if (avctx->codec_id == AV_CODEC_ID_PNG &&
1194  avctx->skip_frame == AVDISCARD_ALL) {
1195  return 0;
1196  }
1197 
1198  if (CONFIG_APNG_DECODER && avctx->codec_id == AV_CODEC_ID_APNG && length == 0) {
1199  if (!(s->pic_state & PNG_IDAT))
1200  return 0;
1201  else
1202  goto exit_loop;
1203  }
1204  av_log(avctx, AV_LOG_ERROR, "%d bytes left\n", length);
1205  if ( s->pic_state & PNG_ALLIMAGE
1206  && avctx->strict_std_compliance <= FF_COMPLIANCE_NORMAL)
1207  goto exit_loop;
1208  ret = AVERROR_INVALIDDATA;
1209  goto fail;
1210  }
1211 
1212  length = bytestream2_get_be32(&s->gb);
1213  if (length > 0x7fffffff || length > bytestream2_get_bytes_left(&s->gb)) {
1214  av_log(avctx, AV_LOG_ERROR, "chunk too big\n");
1215  ret = AVERROR_INVALIDDATA;
1216  goto fail;
1217  }
1218  if (avctx->err_recognition & (AV_EF_CRCCHECK | AV_EF_IGNORE_ERR)) {
1219  uint32_t crc_sig = AV_RB32(s->gb.buffer + length + 4);
1220  uint32_t crc_cal = ~av_crc(crc_tab, UINT32_MAX, s->gb.buffer, length + 4);
1221  if (crc_sig ^ crc_cal) {
1222  av_log(avctx, AV_LOG_ERROR, "CRC mismatch in chunk");
1223  if (avctx->err_recognition & AV_EF_EXPLODE) {
1224  av_log(avctx, AV_LOG_ERROR, ", quitting\n");
1225  ret = AVERROR_INVALIDDATA;
1226  goto fail;
1227  }
1228  av_log(avctx, AV_LOG_ERROR, ", skipping\n");
1229  bytestream2_skip(&s->gb, 4); /* tag */
1230  goto skip_tag;
1231  }
1232  }
1233  tag = bytestream2_get_le32(&s->gb);
1234  if (avctx->debug & FF_DEBUG_STARTCODE)
1235  av_log(avctx, AV_LOG_DEBUG, "png: tag=%s length=%u\n",
1236  av_fourcc2str(tag), length);
1237 
1238  if (avctx->codec_id == AV_CODEC_ID_PNG &&
1239  avctx->skip_frame == AVDISCARD_ALL) {
1240  switch(tag) {
1241  case MKTAG('I', 'H', 'D', 'R'):
1242  case MKTAG('p', 'H', 'Y', 's'):
1243  case MKTAG('t', 'E', 'X', 't'):
1244  case MKTAG('I', 'D', 'A', 'T'):
1245  case MKTAG('t', 'R', 'N', 'S'):
1246  break;
1247  default:
1248  goto skip_tag;
1249  }
1250  }
1251 
1252  metadatap = &p->metadata;
1253  switch (tag) {
1254  case MKTAG('I', 'H', 'D', 'R'):
1255  if ((ret = decode_ihdr_chunk(avctx, s, length)) < 0)
1256  goto fail;
1257  break;
1258  case MKTAG('p', 'H', 'Y', 's'):
1259  if ((ret = decode_phys_chunk(avctx, s)) < 0)
1260  goto fail;
1261  break;
1262  case MKTAG('f', 'c', 'T', 'L'):
1263  if (!CONFIG_APNG_DECODER || avctx->codec_id != AV_CODEC_ID_APNG)
1264  goto skip_tag;
1265  if ((ret = decode_fctl_chunk(avctx, s, length)) < 0)
1266  goto fail;
1267  decode_next_dat = 1;
1268  break;
1269  case MKTAG('f', 'd', 'A', 'T'):
1270  if (!CONFIG_APNG_DECODER || avctx->codec_id != AV_CODEC_ID_APNG)
1271  goto skip_tag;
1272  if (!decode_next_dat || length < 4) {
1273  ret = AVERROR_INVALIDDATA;
1274  goto fail;
1275  }
1276  bytestream2_get_be32(&s->gb);
1277  length -= 4;
1278  /* fallthrough */
1279  case MKTAG('I', 'D', 'A', 'T'):
1280  if (CONFIG_APNG_DECODER && avctx->codec_id == AV_CODEC_ID_APNG && !decode_next_dat)
1281  goto skip_tag;
1282  if ((ret = decode_idat_chunk(avctx, s, length, p)) < 0)
1283  goto fail;
1284  break;
1285  case MKTAG('P', 'L', 'T', 'E'):
1286  if (decode_plte_chunk(avctx, s, length) < 0)
1287  goto skip_tag;
1288  break;
1289  case MKTAG('t', 'R', 'N', 'S'):
1290  if (decode_trns_chunk(avctx, s, length) < 0)
1291  goto skip_tag;
1292  break;
1293  case MKTAG('t', 'E', 'X', 't'):
1294  if (decode_text_chunk(s, length, 0, metadatap) < 0)
1295  av_log(avctx, AV_LOG_WARNING, "Broken tEXt chunk\n");
1296  bytestream2_skip(&s->gb, length + 4);
1297  break;
1298  case MKTAG('z', 'T', 'X', 't'):
1299  if (decode_text_chunk(s, length, 1, metadatap) < 0)
1300  av_log(avctx, AV_LOG_WARNING, "Broken zTXt chunk\n");
1301  bytestream2_skip(&s->gb, length + 4);
1302  break;
1303  case MKTAG('s', 'T', 'E', 'R'): {
1304  int mode = bytestream2_get_byte(&s->gb);
1305  AVStereo3D *stereo3d = av_stereo3d_create_side_data(p);
1306  if (!stereo3d) {
1307  ret = AVERROR(ENOMEM);
1308  goto fail;
1309  }
1310 
1311  if (mode == 0 || mode == 1) {
1312  stereo3d->type = AV_STEREO3D_SIDEBYSIDE;
1313  stereo3d->flags = mode ? 0 : AV_STEREO3D_FLAG_INVERT;
1314  } else {
1315  av_log(avctx, AV_LOG_WARNING,
1316  "Unknown value in sTER chunk (%d)\n", mode);
1317  }
1318  bytestream2_skip(&s->gb, 4); /* crc */
1319  break;
1320  }
1321  case MKTAG('i', 'C', 'C', 'P'): {
1322  if ((ret = decode_iccp_chunk(s, length, p)) < 0)
1323  goto fail;
1324  break;
1325  }
1326  case MKTAG('c', 'H', 'R', 'M'): {
1327  AVMasteringDisplayMetadata *mdm = av_mastering_display_metadata_create_side_data(p);
1328  if (!mdm) {
1329  ret = AVERROR(ENOMEM);
1330  goto fail;
1331  }
1332 
1333  mdm->white_point[0] = av_make_q(bytestream2_get_be32(&s->gb), 100000);
1334  mdm->white_point[1] = av_make_q(bytestream2_get_be32(&s->gb), 100000);
1335 
1336  /* RGB Primaries */
1337  for (i = 0; i < 3; i++) {
1338  mdm->display_primaries[i][0] = av_make_q(bytestream2_get_be32(&s->gb), 100000);
1339  mdm->display_primaries[i][1] = av_make_q(bytestream2_get_be32(&s->gb), 100000);
1340  }
1341 
1342  mdm->has_primaries = 1;
1343  bytestream2_skip(&s->gb, 4); /* crc */
1344  break;
1345  }
1346  case MKTAG('g', 'A', 'M', 'A'): {
1347  AVBPrint bp;
1348  char *gamma_str;
1349  int num = bytestream2_get_be32(&s->gb);
1350  av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED);
1351 
1352  av_bprintf(&bp, "%i/%i", num, 100000);
1353  ret = av_bprint_finalize(&bp, &gamma_str);
1354  if (ret < 0)
1355  return ret;
1356 
1357  av_dict_set(&p->metadata, "gamma", gamma_str, AV_DICT_DONT_STRDUP_VAL);
1358 
1359  bytestream2_skip(&s->gb, 4); /* crc */
1360  break;
1361  }
1362  case MKTAG('I', 'E', 'N', 'D'):
1363  if (!(s->pic_state & PNG_ALLIMAGE))
1364  av_log(avctx, AV_LOG_ERROR, "IEND without all image\n");
1365  if (!(s->pic_state & (PNG_ALLIMAGE|PNG_IDAT))) {
1366  ret = AVERROR_INVALIDDATA;
1367  goto fail;
1368  }
1369  bytestream2_skip(&s->gb, 4); /* crc */
1370  goto exit_loop;
1371  default:
1372  /* skip tag */
1373 skip_tag:
1374  bytestream2_skip(&s->gb, length + 4);
1375  break;
1376  }
1377  }
1378 exit_loop:
1379 
1380  if (avctx->codec_id == AV_CODEC_ID_PNG &&
1381  avctx->skip_frame == AVDISCARD_ALL) {
1382  return 0;
1383  }
1384 
1385  if (percent_missing(s) > avctx->discard_damaged_percentage)
1386  return AVERROR_INVALIDDATA;
1387 
1388  if (s->bits_per_pixel <= 4)
1389  handle_small_bpp(s, p);
1390 
1391  /* apply transparency if needed */
1392  if (s->has_trns && s->color_type != PNG_COLOR_TYPE_PALETTE) {
1393  size_t byte_depth = s->bit_depth > 8 ? 2 : 1;
1394  size_t raw_bpp = s->bpp - byte_depth;
1395  unsigned x, y;
1396 
1397  av_assert0(s->bit_depth > 1);
1398 
1399  for (y = 0; y < s->height; ++y) {
1400  uint8_t *row = &s->image_buf[s->image_linesize * y];
1401 
1402  if (s->bpp == 2 && byte_depth == 1) {
1403  uint8_t *pixel = &row[2 * s->width - 1];
1404  uint8_t *rowp = &row[1 * s->width - 1];
1405  int tcolor = s->transparent_color_be[0];
1406  for (x = s->width; x > 0; --x) {
1407  *pixel-- = *rowp == tcolor ? 0 : 0xff;
1408  *pixel-- = *rowp--;
1409  }
1410  } else if (s->bpp == 4 && byte_depth == 1) {
1411  uint8_t *pixel = &row[4 * s->width - 1];
1412  uint8_t *rowp = &row[3 * s->width - 1];
1413  int tcolor = AV_RL24(s->transparent_color_be);
1414  for (x = s->width; x > 0; --x) {
1415  *pixel-- = AV_RL24(rowp-2) == tcolor ? 0 : 0xff;
1416  *pixel-- = *rowp--;
1417  *pixel-- = *rowp--;
1418  *pixel-- = *rowp--;
1419  }
1420  } else {
1421  /* since we're updating in-place, we have to go from right to left */
1422  for (x = s->width; x > 0; --x) {
1423  uint8_t *pixel = &row[s->bpp * (x - 1)];
1424  memmove(pixel, &row[raw_bpp * (x - 1)], raw_bpp);
1425 
1426  if (!memcmp(pixel, s->transparent_color_be, raw_bpp)) {
1427  memset(&pixel[raw_bpp], 0, byte_depth);
1428  } else {
1429  memset(&pixel[raw_bpp], 0xff, byte_depth);
1430  }
1431  }
1432  }
1433  }
1434  }
1435 
1436  /* handle P-frames only if a predecessor frame is available */
1437  if (s->last_picture.f->data[0]) {
1438  if ( !(avpkt->flags & AV_PKT_FLAG_KEY) && avctx->codec_tag != AV_RL32("MPNG")
1439  && s->last_picture.f->width == p->width
1440  && s->last_picture.f->height== p->height
1441  && s->last_picture.f->format== p->format
1442  ) {
1443  if (CONFIG_PNG_DECODER && avctx->codec_id != AV_CODEC_ID_APNG)
1444  handle_p_frame_png(s, p);
1445  else if (CONFIG_APNG_DECODER &&
1446  s->previous_picture.f->width == p->width &&
1447  s->previous_picture.f->height== p->height &&
1448  s->previous_picture.f->format== p->format &&
1449  avctx->codec_id == AV_CODEC_ID_APNG &&
1450  (ret = handle_p_frame_apng(avctx, s, p)) < 0)
1451  goto fail;
1452  }
1453  }
1454  ff_thread_report_progress(&s->picture, INT_MAX, 0);
1455  ff_thread_report_progress(&s->previous_picture, INT_MAX, 0);
1456 
1457  return 0;
1458 
1459 fail:
1460  ff_thread_report_progress(&s->picture, INT_MAX, 0);
1461  ff_thread_report_progress(&s->previous_picture, INT_MAX, 0);
1462  return ret;
1463 }
1464 
1465 #if CONFIG_PNG_DECODER
1466 static int decode_frame_png(AVCodecContext *avctx,
1467  void *data, int *got_frame,
1468  AVPacket *avpkt)
1469 {
1470  PNGDecContext *const s = avctx->priv_data;
1471  const uint8_t *buf = avpkt->data;
1472  int buf_size = avpkt->size;
1473  AVFrame *p;
1474  int64_t sig;
1475  int ret;
1476 
1477  ff_thread_release_buffer(avctx, &s->last_picture);
1478  FFSWAP(ThreadFrame, s->picture, s->last_picture);
1479  p = s->picture.f;
1480 
1481  bytestream2_init(&s->gb, buf, buf_size);
1482 
1483  /* check signature */
1484  sig = bytestream2_get_be64(&s->gb);
1485  if (sig != PNGSIG &&
1486  sig != MNGSIG) {
1487  av_log(avctx, AV_LOG_ERROR, "Invalid PNG signature 0x%08"PRIX64".\n", sig);
1488  return AVERROR_INVALIDDATA;
1489  }
1490 
1491  s->y = s->has_trns = 0;
1492  s->hdr_state = 0;
1493  s->pic_state = 0;
1494 
1495  /* init the zlib */
1496  s->zstream.zalloc = ff_png_zalloc;
1497  s->zstream.zfree = ff_png_zfree;
1498  s->zstream.opaque = NULL;
1499  ret = inflateInit(&s->zstream);
1500  if (ret != Z_OK) {
1501  av_log(avctx, AV_LOG_ERROR, "inflateInit returned error %d\n", ret);
1502  return AVERROR_EXTERNAL;
1503  }
1504 
1505  if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
1506  goto the_end;
1507 
1508  if (avctx->skip_frame == AVDISCARD_ALL) {
1509  *got_frame = 0;
1510  ret = bytestream2_tell(&s->gb);
1511  goto the_end;
1512  }
1513 
1514  if ((ret = av_frame_ref(data, s->picture.f)) < 0)
1515  goto the_end;
1516 
1517  *got_frame = 1;
1518 
1519  ret = bytestream2_tell(&s->gb);
1520 the_end:
1521  inflateEnd(&s->zstream);
1522  s->crow_buf = NULL;
1523  return ret;
1524 }
1525 #endif
1526 
1527 #if CONFIG_APNG_DECODER
1528 static int decode_frame_apng(AVCodecContext *avctx,
1529  void *data, int *got_frame,
1530  AVPacket *avpkt)
1531 {
1532  PNGDecContext *const s = avctx->priv_data;
1533  int ret;
1534  AVFrame *p;
1535 
1536 
1537  ff_thread_release_buffer(avctx, &s->picture);
1538  p = s->picture.f;
1539 
1540  if (!(s->hdr_state & PNG_IHDR)) {
1541  if (!avctx->extradata_size)
1542  return AVERROR_INVALIDDATA;
1543 
1544  /* only init fields, there is no zlib use in extradata */
1545  s->zstream.zalloc = ff_png_zalloc;
1546  s->zstream.zfree = ff_png_zfree;
1547 
1548  bytestream2_init(&s->gb, avctx->extradata, avctx->extradata_size);
1549  if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
1550  goto end;
1551  }
1552 
1553  /* reset state for a new frame */
1554  if ((ret = inflateInit(&s->zstream)) != Z_OK) {
1555  av_log(avctx, AV_LOG_ERROR, "inflateInit returned error %d\n", ret);
1556  ret = AVERROR_EXTERNAL;
1557  goto end;
1558  }
1559  s->y = 0;
1560  s->pic_state = 0;
1561  bytestream2_init(&s->gb, avpkt->data, avpkt->size);
1562  if ((ret = decode_frame_common(avctx, s, p, avpkt)) < 0)
1563  goto end;
1564 
1565  if (!(s->pic_state & PNG_ALLIMAGE))
1566  av_log(avctx, AV_LOG_WARNING, "Frame did not contain a complete image\n");
1567  if (!(s->pic_state & (PNG_ALLIMAGE|PNG_IDAT))) {
1568  ret = AVERROR_INVALIDDATA;
1569  goto end;
1570  }
1571  if ((ret = av_frame_ref(data, s->picture.f)) < 0)
1572  goto end;
1573 
1574  *got_frame = 1;
1575  ret = bytestream2_tell(&s->gb);
1576 
1577 end:
1578  inflateEnd(&s->zstream);
1579  return ret;
1580 }
1581 #endif
1582 
1583 #if CONFIG_LSCR_DECODER
1584 static int decode_frame_lscr(AVCodecContext *avctx,
1585  void *data, int *got_frame,
1586  AVPacket *avpkt)
1587 {
1588  PNGDecContext *const s = avctx->priv_data;
1589  GetByteContext *gb = &s->gb;
1590  AVFrame *frame = data;
1591  int ret, nb_blocks, offset = 0;
1592 
1593  if (avpkt->size < 2)
1594  return AVERROR_INVALIDDATA;
1595 
1596  bytestream2_init(gb, avpkt->data, avpkt->size);
1597 
1598  if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
1599  return ret;
1600 
1601  nb_blocks = bytestream2_get_le16(gb);
1602  if (bytestream2_get_bytes_left(gb) < 2 + nb_blocks * (12 + 8))
1603  return AVERROR_INVALIDDATA;
1604 
1605  if (s->last_picture.f->data[0]) {
1606  ret = av_frame_copy(frame, s->last_picture.f);
1607  if (ret < 0)
1608  return ret;
1609  }
1610 
1611  for (int b = 0; b < nb_blocks; b++) {
1612  int x, y, x2, y2, w, h, left;
1613  uint32_t csize, size;
1614 
1615  s->zstream.zalloc = ff_png_zalloc;
1616  s->zstream.zfree = ff_png_zfree;
1617  s->zstream.opaque = NULL;
1618 
1619  if ((ret = inflateInit(&s->zstream)) != Z_OK) {
1620  av_log(avctx, AV_LOG_ERROR, "inflateInit returned error %d\n", ret);
1621  ret = AVERROR_EXTERNAL;
1622  goto end;
1623  }
1624 
1625  bytestream2_seek(gb, 2 + b * 12, SEEK_SET);
1626 
1627  x = bytestream2_get_le16(gb);
1628  y = bytestream2_get_le16(gb);
1629  x2 = bytestream2_get_le16(gb);
1630  y2 = bytestream2_get_le16(gb);
1631  s->width = s->cur_w = w = x2-x;
1632  s->height = s->cur_h = h = y2-y;
1633 
1634  if (w <= 0 || x < 0 || x >= avctx->width || w + x > avctx->width ||
1635  h <= 0 || y < 0 || y >= avctx->height || h + y > avctx->height) {
1636  ret = AVERROR_INVALIDDATA;
1637  goto end;
1638  }
1639 
1640  size = bytestream2_get_le32(gb);
1641 
1642  frame->key_frame = (nb_blocks == 1) &&
1643  (w == avctx->width) &&
1644  (h == avctx->height) &&
1645  (x == 0) && (y == 0);
1646 
1647  bytestream2_seek(gb, 2 + nb_blocks * 12 + offset, SEEK_SET);
1648  csize = bytestream2_get_be32(gb);
1649  if (bytestream2_get_le32(gb) != MKTAG('I', 'D', 'A', 'T')) {
1650  ret = AVERROR_INVALIDDATA;
1651  goto end;
1652  }
1653 
1654  offset += size;
1655  left = size;
1656 
1657  s->y = 0;
1658  s->row_size = w * 3;
1659 
1660  av_fast_padded_malloc(&s->buffer, &s->buffer_size, s->row_size + 16);
1661  if (!s->buffer) {
1662  ret = AVERROR(ENOMEM);
1663  goto end;
1664  }
1665 
1666  av_fast_padded_mallocz(&s->last_row, &s->last_row_size, s->row_size);
1667  if (!s->last_row) {
1668  ret = AVERROR(ENOMEM);
1669  goto end;
1670  }
1671 
1672  s->crow_size = w * 3 + 1;
1673  s->crow_buf = s->buffer + 15;
1674  s->zstream.avail_out = s->crow_size;
1675  s->zstream.next_out = s->crow_buf;
1676  s->image_buf = frame->data[0] + (avctx->height - y - 1) * frame->linesize[0] + x * 3;
1677  s->image_linesize =-frame->linesize[0];
1678  s->bpp = 3;
1679  s->pic_state = 0;
1680 
1681  while (left > 16) {
1682  ret = png_decode_idat(s, csize);
1683  if (ret < 0)
1684  goto end;
1685  left -= csize + 16;
1686  if (left > 16) {
1687  bytestream2_skip(gb, 4);
1688  csize = bytestream2_get_be32(gb);
1689  if (bytestream2_get_le32(gb) != MKTAG('I', 'D', 'A', 'T')) {
1690  ret = AVERROR_INVALIDDATA;
1691  goto end;
1692  }
1693  }
1694  }
1695 
1696  inflateEnd(&s->zstream);
1697  }
1698 
1699  frame->pict_type = frame->key_frame ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;
1700 
1701  av_frame_unref(s->last_picture.f);
1702  if ((ret = av_frame_ref(s->last_picture.f, frame)) < 0)
1703  return ret;
1704 
1705  *got_frame = 1;
1706 end:
1707  inflateEnd(&s->zstream);
1708 
1709  if (ret < 0)
1710  return ret;
1711  return avpkt->size;
1712 }
1713 
1714 static void decode_flush(AVCodecContext *avctx)
1715 {
1716  PNGDecContext *s = avctx->priv_data;
1717 
1718  av_frame_unref(s->last_picture.f);
1719 }
1720 
1721 #endif
1722 
1723 #if HAVE_THREADS
1724 static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
1725 {
1726  PNGDecContext *psrc = src->priv_data;
1727  PNGDecContext *pdst = dst->priv_data;
1728  int ret;
1729 
1730  if (dst == src)
1731  return 0;
1732 
1733  ff_thread_release_buffer(dst, &pdst->picture);
1734  if (psrc->picture.f->data[0] &&
1735  (ret = ff_thread_ref_frame(&pdst->picture, &psrc->picture)) < 0)
1736  return ret;
1737  if (CONFIG_APNG_DECODER && dst->codec_id == AV_CODEC_ID_APNG) {
1738  pdst->width = psrc->width;
1739  pdst->height = psrc->height;
1740  pdst->bit_depth = psrc->bit_depth;
1741  pdst->color_type = psrc->color_type;
1742  pdst->compression_type = psrc->compression_type;
1743  pdst->interlace_type = psrc->interlace_type;
1744  pdst->filter_type = psrc->filter_type;
1745  pdst->cur_w = psrc->cur_w;
1746  pdst->cur_h = psrc->cur_h;
1747  pdst->x_offset = psrc->x_offset;
1748  pdst->y_offset = psrc->y_offset;
1749  pdst->has_trns = psrc->has_trns;
1750  memcpy(pdst->transparent_color_be, psrc->transparent_color_be, sizeof(pdst->transparent_color_be));
1751 
1752  pdst->dispose_op = psrc->dispose_op;
1753 
1754  memcpy(pdst->palette, psrc->palette, sizeof(pdst->palette));
1755 
1756  pdst->hdr_state |= psrc->hdr_state;
1757 
1758  ff_thread_release_buffer(dst, &pdst->last_picture);
1759  if (psrc->last_picture.f->data[0] &&
1760  (ret = ff_thread_ref_frame(&pdst->last_picture, &psrc->last_picture)) < 0)
1761  return ret;
1762 
1763  ff_thread_release_buffer(dst, &pdst->previous_picture);
1764  if (psrc->previous_picture.f->data[0] &&
1765  (ret = ff_thread_ref_frame(&pdst->previous_picture, &psrc->previous_picture)) < 0)
1766  return ret;
1767  }
1768 
1769  return 0;
1770 }
1771 #endif
1772 
1773 static av_cold int png_dec_init(AVCodecContext *avctx)
1774 {
1775  PNGDecContext *s = avctx->priv_data;
1776 
1777  avctx->color_range = AVCOL_RANGE_JPEG;
1778 
1779  if (avctx->codec_id == AV_CODEC_ID_LSCR)
1780  avctx->pix_fmt = AV_PIX_FMT_BGR24;
1781 
1782  s->avctx = avctx;
1783  s->previous_picture.f = av_frame_alloc();
1784  s->last_picture.f = av_frame_alloc();
1785  s->picture.f = av_frame_alloc();
1786  if (!s->previous_picture.f || !s->last_picture.f || !s->picture.f) {
1787  av_frame_free(&s->previous_picture.f);
1788  av_frame_free(&s->last_picture.f);
1789  av_frame_free(&s->picture.f);
1790  return AVERROR(ENOMEM);
1791  }
1792 
1793  ff_pngdsp_init(&s->dsp);
1794 
1795  return 0;
1796 }
1797 
1798 static av_cold int png_dec_end(AVCodecContext *avctx)
1799 {
1800  PNGDecContext *s = avctx->priv_data;
1801 
1802  ff_thread_release_buffer(avctx, &s->previous_picture);
1803  av_frame_free(&s->previous_picture.f);
1804  ff_thread_release_buffer(avctx, &s->last_picture);
1805  av_frame_free(&s->last_picture.f);
1806  ff_thread_release_buffer(avctx, &s->picture);
1807  av_frame_free(&s->picture.f);
1808  av_freep(&s->buffer);
1809  s->buffer_size = 0;
1810  av_freep(&s->last_row);
1811  s->last_row_size = 0;
1812  av_freep(&s->tmp_row);
1813  s->tmp_row_size = 0;
1814 
1815  return 0;
1816 }
1817 
1818 #if CONFIG_APNG_DECODER
1820  .name = "apng",
1821  .long_name = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
1822  .type = AVMEDIA_TYPE_VIDEO,
1823  .id = AV_CODEC_ID_APNG,
1824  .priv_data_size = sizeof(PNGDecContext),
1825  .init = png_dec_init,
1826  .close = png_dec_end,
1827  .decode = decode_frame_apng,
1828  .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
1829  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/,
1830  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE |
1831  FF_CODEC_CAP_ALLOCATE_PROGRESS,
1832 };
1833 #endif
1834 
1835 #if CONFIG_PNG_DECODER
1837  .name = "png",
1838  .long_name = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
1839  .type = AVMEDIA_TYPE_VIDEO,
1840  .id = AV_CODEC_ID_PNG,
1841  .priv_data_size = sizeof(PNGDecContext),
1842  .init = png_dec_init,
1843  .close = png_dec_end,
1844  .decode = decode_frame_png,
1845  .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
1846  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/,
1847  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM | FF_CODEC_CAP_INIT_THREADSAFE |
1848  FF_CODEC_CAP_ALLOCATE_PROGRESS,
1849 };
1850 #endif
1851 
1852 #if CONFIG_LSCR_DECODER
1854  .name = "lscr",
1855  .long_name = NULL_IF_CONFIG_SMALL("LEAD Screen Capture"),
1856  .type = AVMEDIA_TYPE_VIDEO,
1857  .id = AV_CODEC_ID_LSCR,
1858  .priv_data_size = sizeof(PNGDecContext),
1859  .init = png_dec_init,
1860  .close = png_dec_end,
1861  .decode = decode_frame_lscr,
1862  .flush = decode_flush,
1863  .capabilities = AV_CODEC_CAP_DR1 /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/,
1866 };
1867 #endif
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:167
static int decode_idat_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length, AVFrame *p)
Definition: pngdec.c:629
static int decode_fctl_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length)
Definition: pngdec.c:973
#define PNG_FILTER_VALUE_AVG
Definition: png.h:41
static void png_handle_row(PNGDecContext *s)
Definition: pngdec.c:334
ThreadFrame previous_picture
Definition: pngdec.c:57
#define NULL
Definition: coverity.c:32
int last_y_offset
Definition: pngdec.c:67
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane...
Definition: imgutils.c:76
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:59
void av_bprintf(AVBPrint *buf, const char *fmt,...)
Definition: bprint.c:94
This structure describes decoded (raw) audio or video data.
Definition: frame.h:300
int width
Definition: pngdec.c:63
ptrdiff_t const GLvoid * data
Definition: opengl_enc.c:100
static void flush(AVCodecContext *avctx)
unsigned int tmp_row_size
Definition: pngdec.c:88
8 bits gray, 8 bits alpha
Definition: pixfmt.h:143
misc image utilities
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:182
AVFrame * f
Definition: thread.h:35
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:68
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:104
const char * g
Definition: vf_curves.c:115
int pass_row_size
Definition: pngdec.c:94
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:35
AVDictionary * metadata
Definition: frame.h:210
uint8_t * tmp_row
Definition: pngdec.c:87
#define avpriv_request_sample(...)
PNGHeaderState
Definition: pngdec.c:42
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:1292
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
int num
Numerator.
Definition: rational.h:59
static int decode_text_chunk(PNGDecContext *s, uint32_t length, int compressed, AVDictionary **dict)
Definition: pngdec.c:513
int size
Definition: packet.h:356
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel...
Definition: avcodec.h:1036
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:36
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:867
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
Definition: bytestream.h:133
void av_fast_padded_malloc(void *ptr, unsigned int *size, size_t min_size)
Same behaviour av_fast_malloc but the buffer has additional AV_INPUT_BUFFER_PADDING_SIZE at the end w...
Definition: utils.c:70
enum PNGImageState pic_state
Definition: pngdec.c:62
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
discard all
Definition: avcodec.h:235
Views are next to each other.
Definition: stereo3d.h:67
#define PNG_COLOR_TYPE_RGB
Definition: png.h:33
static void error(const char *err)
void(* add_bytes_l2)(uint8_t *dst, uint8_t *src1, uint8_t *src2, int w)
Definition: pngdsp.h:28
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them.reget_buffer() and buffer age optimizations no longer work.*The contents of buffers must not be written to after ff_thread_report_progress() has been called on them.This includes draw_edges().Porting codecs to frame threading
#define PNG_COLOR_TYPE_GRAY_ALPHA
Definition: png.h:35
AVCodec.
Definition: avcodec.h:2600
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
Definition: decode_audio.c:71
#define PNG_COLOR_TYPE_PALETTE
Definition: png.h:32
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
Definition: bprint.c:235
int filter_type
Definition: pngdec.c:74
void ff_add_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top, int w, int bpp)
Definition: pngdec.c:187
#define AV_DICT_DONT_STRDUP_KEY
Take ownership of a key that&#39;s been allocated with av_malloc() or another memory allocation function...
Definition: dict.h:73
#define PNG_FILTER_VALUE_PAETH
Definition: png.h:42
enum AVDiscard skip_frame
Skip decoding for selected frames.
Definition: avcodec.h:2132
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
static int percent_missing(PNGDecContext *s)
Definition: pngdec.c:324
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init functi...
Definition: internal.h:40
int y_offset
Definition: pngdec.c:66
uint8_t
#define av_cold
Definition: attributes.h:82
#define av_malloc(s)
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:190
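A minimal allocation/teardown sketch using only the AVFrame calls referenced on this page:

    AVFrame *tmp = av_frame_alloc();
    if (!tmp)
        return AVERROR(ENOMEM);
    /* ... fill or reference picture data into tmp ... */
    av_frame_free(&tmp);        /* unreferences any buffers and sets tmp to NULL */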
#define PNG_COLOR_TYPE_RGB_ALPHA
Definition: png.h:34
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
Stereo 3D type: this structure describes how two videos are packed within a single video surface...
Definition: stereo3d.h:176
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1743
offset
It's the only field you need to keep, assuming you have a context; there is some magic around it that you don't need to care about, just let it be.
#define f(width, name)
Definition: cbs_vp9.c:255
static av_cold int end(AVCodecContext *avctx)
Definition: avrndec.c:90
Undefined behavior: in C, some operations are undefined, like signed integer overflow, dereferencing freed pointers, or accessing outside allocated space. Undefined behavior must not occur in a C program; it is not safe even if the output of the undefined operation is unused. Optimizing compilers have in fact optimized code on the assumption that no undefined behavior occurs, and optimizing based on wrong assumptions can and has in some cases led to effects beyond the output of computations. The signed integer overflow problem is common in speed-critical code.
Definition: undefined.txt:32
Multithreading support functions.
AVCodec ff_apng_decoder
filter_frame (doc/filter_design.txt): for filters that do not use the activate callback, this method is called when a frame is pushed to the filter's input; it can be called at any time except in a reentrant way.
packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is st...
Definition: pixfmt.h:205
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:444
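A sketch of the reference-and-replace pattern; dst and src are placeholder frames and the surrounding error handling is illustrative:

    int ret;

    av_frame_unref(dst);                  /* drop whatever dst referenced before */
    ret = av_frame_ref(dst, src);         /* dst now shares src's refcounted buffers */
    if (ret < 0)
        return ret;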
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
Definition: avcodec.h:758
AV_RB32
Definition: bytestream.h:87
Public header for CRC hash function implementation.
static int decode_phys_chunk(AVCodecContext *avctx, PNGDecContext *s)
Definition: pngdec.c:613
Structure to hold side data for an AVFrame.
Definition: frame.h:206
uint8_t * data
Definition: packet.h:355
const uint8_t * buffer
Definition: bytestream.h:34
uint32_t tag
Definition: movenc.c:1532
int ff_thread_ref_frame(ThreadFrame *dst, ThreadFrame *src)
Definition: utils.c:1839
#define ff_dlog(a,...)
AVDictionary * metadata
metadata.
Definition: frame.h:586
static int decode_iccp_chunk(PNGDecContext *s, int length, AVFrame *f)
Definition: pngdec.c:855
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:447
AVCodec ff_lscr_decoder
ptrdiff_t size
Definition: opengl_enc.c:100
unsigned int last_row_size
Definition: pngdec.c:86
#define AV_WB16(p, v)
Definition: intreadwrite.h:405
int cur_h
Definition: pngdec.c:64
#define av_log(a,...)
#define FF_CODEC_CAP_ALLOCATE_PROGRESS
Definition: internal.h:75
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:388
static int decode_plte_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length)
Definition: pngdec.c:784
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
Definition: bytestream.h:154
#define U(x)
Definition: vp56_arith.h:37
#define src
Definition: vp8dsp.c:254
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
int width
Definition: frame.h:358
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
static const uint8_t png_pass_dsp_mask[NB_PASSES]
Definition: pngdec.c:110
int flags
Additional information about the frame packing.
Definition: stereo3d.h:185
16 bits gray, 16 bits alpha (big-endian)
Definition: pixfmt.h:212
#define AV_BPRINT_SIZE_UNLIMITED
void ff_thread_release_buffer(AVCodecContext *avctx, ThreadFrame *f)
Wrapper around release_buffer() for frame-multithreaded codecs.
static int decode_frame_common(AVCodecContext *avctx, PNGDecContext *s, AVFrame *p, AVPacket *avpkt)
Definition: pngdec.c:1180
static const uint16_t mask[17]
Definition: lzw.c:38
#define OP_SUB(x, s, l)
static av_always_inline void bytestream2_skip(GetByteContext *g, unsigned int size)
Definition: bytestream.h:164
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
static void handle_p_frame_png(PNGDecContext *s, AVFrame *p)
Definition: pngdec.c:1047
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:186
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
Definition: bprint.c:69
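A small sketch of the AVBPrint life cycle used when expanding compressed text/ICC data; the av_bprintf() call and its content are illustrative only:

    AVBPrint bp;
    char *text = NULL;
    int ret;

    av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED);
    av_bprintf(&bp, "keyword: %s", "Comment");
    ret = av_bprint_finalize(&bp, &text);   /* detaches the accumulated string */
    if (ret < 0)
        return ret;
    /* ... use text ... */
    av_free(text);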
uint8_t * crow_buf
Definition: pngdec.c:84
const char * r
Definition: vf_curves.c:114
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:197
int pass
Definition: pngdec.c:91
int ff_png_get_nb_channels(int color_type)
Definition: png.c:49
ThreadFrame picture
Definition: pngdec.c:59
int height
Definition: pngdec.c:63
#define av_fourcc2str(fourcc)
Definition: avutil.h:348
#define PNGSIG
Definition: png.h:47
simple assert() macros that are a bit more flexible than ISO C assert().
GLsizei GLsizei * length
Definition: opengl_enc.c:114
const char * name
Name of the codec implementation.
Definition: avcodec.h:2607
AV_RL24
Definition: bytestream.h:87
int bits_per_pixel
Definition: pngdec.c:76
GetByteContext gb
Definition: pngdec.c:56
#define FFMAX(a, b)
Definition: common.h:94
#define NB_PASSES
Definition: png.h:45
#define fail()
Definition: checkasm.h:123
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: avcodec.h:461
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:800
uint8_t blend_op
Definition: pngdec.c:68
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:361
#define ONLY_IF_THREADS_ENABLED(x)
Define a function with only the non-default version specified.
Definition: internal.h:225
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:33
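A hedged sketch of attaching stereo information to a decoded frame; the exact chunk handling in pngdec.c is not reproduced here:

    AVStereo3D *stereo = av_stereo3d_create_side_data(frame);
    if (!stereo)
        return AVERROR(ENOMEM);
    stereo->type = AV_STEREO3D_SIDEBYSIDE;   /* views are next to each other */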
Frame threading buffer handling (doc/multithreading.txt): set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec.caps_internal and use ff_thread_get_buffer() to allocate frames; the frames must then be freed with ff_thread_release_buffer(). Otherwise decode directly into the user-supplied frames. Call ff_thread_report_progress() after some part of the current picture has decoded; a good place to put this is where draw_horiz_band() is called (add it if it isn't called anywhere).
z_stream zstream
Definition: pngdec.c:96
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx)
Check if the given dimension of an image is valid, meaning that all bytes of the image can be address...
Definition: imgutils.c:282
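Typical use, as a guard before any width/height-derived allocation; width and height stand for whatever the header chunk declared:

    if (av_image_check_size(width, height, 0, avctx) < 0)
        return AVERROR_INVALIDDATA;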
#define b
Definition: input.c:41
AVMasteringDisplayMetadata * av_mastering_display_metadata_create_side_data(AVFrame *frame)
Allocate a complete AVMasteringDisplayMetadata and add it to the frame.
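A hedged sketch with illustrative D65 white-point values; a real chunk handler would also fill all three display_primaries entries before setting has_primaries:

    AVMasteringDisplayMetadata *mdm = av_mastering_display_metadata_create_side_data(frame);
    if (!mdm)
        return AVERROR(ENOMEM);
    mdm->white_point[0] = av_make_q(3127, 10000);   /* CIE x of D65 */
    mdm->white_point[1] = av_make_q(3290, 10000);   /* CIE y of D65 */
    mdm->has_primaries  = 1;                        /* illustrative; fill display_primaries[0..2] too */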
enum AVPictureType pict_type
Picture type of the frame.
Definition: frame.h:383
alias for AV_PIX_FMT_YA8
Definition: pixfmt.h:146
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:1786
#define FFMIN(a, b)
Definition: common.h:96
#define PNG_FILTER_VALUE_SUB
Definition: png.h:39
uint32_t palette[256]
Definition: pngdec.c:83
#define AV_DICT_DONT_STRDUP_VAL
Take ownership of a value that's been allocated with av_malloc() or another memory allocation functio...
Definition: dict.h:76
#define PNG_COLOR_TYPE_GRAY
Definition: png.h:31
static void png_filter_row(PNGDSPContext *dsp, uint8_t *dst, int filter_type, uint8_t *src, uint8_t *last, int size, int bpp)
Definition: pngdec.c:253
int width
picture width / height.
Definition: avcodec.h:830
Frame threading setup (doc/multithreading.txt): move code that accesses the context up to before the decode process starts, and call ff_thread_finish_setup() afterwards. If some code can't be moved...
uint8_t w
Definition: llviddspenc.c:38
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
uint32_t av_crc(const AVCRC *ctx, uint32_t crc, const uint8_t *buffer, size_t length)
Calculate the CRC of a block.
Definition: crc.c:392
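A sketch of computing a CRC-32 over a chunk with the two helpers above; chunk/chunk_len are placeholders, and the initial value, final inversion and byte order needed to match the value stored in the stream are container conventions not shown here:

    const AVCRC *tab = av_crc_get_table(AV_CRC_32_IEEE_LE);
    uint32_t     crc = av_crc(tab, UINT32_MAX, chunk, chunk_len);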
request_frame (doc/filter_design.txt): for filters that do not use the activate callback, this method is called when a frame is wanted on an output; the filter should push a queued frame, or request a frame on one of its inputs, repeatedly until at least one frame has been pushed, or at least make progress towards producing a frame.
#define s(width, name)
Definition: cbs_vp9.c:257
uint8_t * last_row
Definition: pngdec.c:85
The data contains an ICC profile as an opaque octet buffer following the format described by ISO 1507...
Definition: frame.h:143
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: avcodec.h:1797
packed RGB 8:8:8, 24bpp, BGRBGR...
Definition: pixfmt.h:69
AVCodecContext * avctx
Definition: pngdec.c:54
void av_bprint_get_buffer(AVBPrint *buf, unsigned size, unsigned char **mem, unsigned *actual_size)
Allocate bytes in the buffer for external use.
Definition: bprint.c:218
av_cold void ff_pngdsp_init(PNGDSPContext *dsp)
Definition: pngdsp.c:43
static int decode_zbuf(AVBPrint *bp, const uint8_t *data, const uint8_t *data_end)
Definition: pngdec.c:445
int channels
Definition: pngdec.c:75
the normal 2^n-1 "JPEG" YUV ranges
Definition: pixfmt.h:535
if(ret)
static int decode_ihdr_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length)
Definition: pngdec.c:561
static uint8_t * iso88591_to_utf8(const uint8_t *in, size_t size_in)
Definition: pngdec.c:489
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:62
static av_always_inline int bytestream2_tell(GetByteContext *g)
Definition: bytestream.h:188
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:373
static av_cold int png_dec_init(AVCodecContext *avctx)
Definition: pngdec.c:1773
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:180
int discard_damaged_percentage
The percentage of damaged samples above which a frame is discarded.
Definition: avcodec.h:2464
Libavcodec external API header.
enum PNGHeaderState hdr_state
Definition: pngdec.c:61
int buffer_size
Definition: pngdec.c:90
static int skip_tag(AVIOContext *in, int32_t tag_name)
Definition: ismindex.c:132
enum AVCodecID codec_id
Definition: avcodec.h:667
#define PNG_FILTER_VALUE_UP
Definition: png.h:40
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:331
#define PNG_FILTER_TYPE_LOCO
Definition: png.h:37
uint8_t last_dispose_op
Definition: pngdec.c:69
#define abs(x)
Definition: cuda_runtime.h:35
static AVRational av_make_q(int num, int den)
Create an AVRational.
Definition: rational.h:71
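For example, a pixel-aspect value could be stored like this (the 4:3 figure is illustrative only):

    avctx->sample_aspect_ratio = av_make_q(4, 3);   /* {num = 4, den = 3} */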
int debug
debug
Definition: avcodec.h:1742
main external API structure.
Definition: avcodec.h:657
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:682
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1854
uint8_t * data
Definition: frame.h:208
int interlace_type
Definition: pngdec.c:73
PNGImageState
Definition: pngdec.c:47
Porting codecs to frame threading (doc/multithreading.txt): have update_thread_context() run in the next thread and add AV_CODEC_CAP_FRAME_THREADS to the codec capabilities; there will be very little speed gain at this point, but it should work. If there are inter-frame dependencies...
const uint8_t ff_png_pass_ymask[NB_PASSES]
Definition: png.c:25
int image_linesize
Definition: pngdec.c:82
int extradata_size
Definition: avcodec.h:759
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
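A sketch of filling a metadata dictionary with ownership-transfer flags, loosely following the text-chunk pattern above; the destination pointer is hypothetical, and av_strdup()/av_free() from libavutil/mem.h are assumed:

    AVDictionary **dict = &frame->metadata;          /* hypothetical destination */
    char *key = av_strdup("Comment");
    char *val = av_strdup("illustrative value");

    if (!key || !val) {
        av_free(key);
        av_free(val);
        return AVERROR(ENOMEM);
    }
    /* the dictionary takes ownership of both strings, even on error */
    av_dict_set(dict, key, val, AV_DICT_DONT_STRDUP_KEY | AV_DICT_DONT_STRDUP_VAL);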
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, int size)
Add a new side data to a frame.
Definition: frame.c:727
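A hedged sketch of attaching an ICC profile blob as frame side data; profile and profile_len are placeholders for whatever the iCCP chunk decompressed to:

    AVFrameSideData *sd = av_frame_new_side_data(frame, AV_FRAME_DATA_ICC_PROFILE, profile_len);
    if (!sd)
        return AVERROR(ENOMEM);
    memcpy(sd->data, profile, profile_len);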
#define FF_COMPLIANCE_NORMAL
Definition: avcodec.h:1723
Y , 16bpp, big-endian.
Definition: pixfmt.h:97
static void inflate(uint8_t *dst, const uint8_t *p1, int width, int threshold, const uint8_t *coordinates[], int coord, int maxc)
Definition: vf_neighbor.c:198
Rational number (pair of numerator and denominator).
Definition: rational.h:58
Mastering display metadata capable of representing the color volume of the display used to master the...
int cur_w
Definition: pngdec.c:64
uint8_t transparent_color_be[6]
Definition: pngdec.c:79
#define OP_AVG(x, s, l)
#define AV_EF_IGNORE_ERR
ignore errors and continue
Definition: avcodec.h:1799
#define AV_EF_CRCCHECK
Verify checksums embedded in the bitstream (could be of either encoded or decoded data...
Definition: avcodec.h:1794
uint8_t * image_buf
Definition: pngdec.c:81
uint8_t dispose_op
Definition: pngdec.c:68
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
uint8_t pixel
Definition: tiny_ssim.c:42
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:554
void avpriv_report_missing_feature(void *avc, const char *msg, ...)
Log a generic warning message about a missing feature.
int last_x_offset
Definition: pngdec.c:67
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:314
#define FAST_DIV255(x)
Definition: pngdec.c:1065
static int handle_p_frame_apng(AVCodecContext *avctx, PNGDecContext *s, AVFrame *p)
Definition: pngdec.c:1067
#define YUV2RGB(NAME, TYPE)
Definition: pngdec.c:310
const AVCRC * av_crc_get_table(AVCRCId crc_id)
Get an initialized standard CRC table.
Definition: crc.c:374
static const uint8_t png_pass_mask[NB_PASSES]
Definition: pngdec.c:100
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb...
Definition: pixfmt.h:76
Y , 8bpp.
Definition: pixfmt.h:74
static av_cold int png_dec_end(AVCodecContext *avctx)
Definition: pngdec.c:1798
void(* add_paeth_prediction)(uint8_t *dst, uint8_t *src, uint8_t *top, int w, int bpp)
Definition: pngdsp.h:33
common internal api header.
static void handle_small_bpp(PNGDecContext *s, AVFrame *p)
Definition: pngdec.c:900
#define FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
The decoder extracts and fills its parameters even if the frame is skipped due to the skip_frame sett...
Definition: internal.h:60
#define PNG_FILTER_VALUE_NONE
Definition: png.h:38
static int decode_trns_chunk(AVCodecContext *avctx, PNGDecContext *s, uint32_t length)
Definition: pngdec.c:807
packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:102
int last_w
Definition: pngdec.c:65
void av_fast_padded_mallocz(void *ptr, unsigned int *size, size_t min_size)
Same behaviour as av_fast_padded_malloc, except that the buffer will always be 0-initialized after the call...
Definition: utils.c:82
static const uint8_t png_pass_dsp_ymask[NB_PASSES]
Definition: pngdec.c:105
Stereoscopic video.
int den
Denominator.
Definition: rational.h:60
void ff_png_zfree(void *opaque, void *ptr)
Definition: png.c:44
void * priv_data
Definition: avcodec.h:684
static int png_decode_idat(PNGDecContext *s, int length)
Definition: pngdec.c:415
uint8_t * buffer
Definition: pngdec.c:89
#define av_free(p)
#define FF_DEBUG_STARTCODE
Definition: avcodec.h:1756
int key_frame
1 -> keyframe, 0-> not
Definition: frame.h:378
int row_size
Definition: pngdec.c:93
APNG common header.
static av_always_inline int bytestream2_seek(GetByteContext *g, int offset, int whence)
Definition: bytestream.h:208
PNGDSPContext dsp
Definition: pngdec.c:53
int compression_type
Definition: pngdec.c:72
int last_h
Definition: pngdec.c:65
int ff_png_pass_row_size(int pass, int bits_per_pixel, int width)
Definition: png.c:62
int height
Definition: frame.h:358
FILE * out
Definition: movenc.c:54
int bit_depth
Definition: pngdec.c:70
#define av_freep(p)
int color_type
Definition: pngdec.c:71
ThreadFrame last_picture
Definition: pngdec.c:58
#define av_malloc_array(a, b)
static void png_put_interlaced_row(uint8_t *dst, int width, int bits_per_pixel, int pass, int color_type, const uint8_t *src)
Definition: pngdec.c:117
#define FFSWAP(type, a, b)
Definition: common.h:99
int crow_size
Definition: pngdec.c:92
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2465
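Handy for FF_DEBUG_PICT_INFO style traces, e.g.:

    av_log(avctx, AV_LOG_DEBUG, "format %s, %dx%d\n",
           av_get_pix_fmt_name(avctx->pix_fmt), avctx->width, avctx->height);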
static void decode_flush(AVCodecContext *avctx)
Definition: agm.c:1261
Format negotiation (doc/filter_design.txt): for each input and each output, filters list their supported formats (pixel formats for video, channel layouts and sample formats for audio) as references to shared lists; the negotiation mechanism replaces them with the intersection of the formats supported at each end of a link, so a filter that requires identical input and output formats only has to reference the same list on both sides. query_formats() can leave some formats unset and return AVERROR(EAGAIN) to have negotiation try again later, which lets filters with complex requirements use the format negotiated on one link to set the formats supported on another.
int x_offset
Definition: pngdec.c:66
#define MKTAG(a, b, c, d)
Definition: common.h:396
void * ff_png_zalloc(void *opaque, unsigned int items, unsigned int size)
Definition: png.c:39
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:57
AV_RL32
Definition: bytestream.h:87
This structure stores compressed data.
Definition: packet.h:332
int has_trns
Definition: pngdec.c:78
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:640
mode
Use these values in ebur128_init (or'ed).
Definition: ebur128.h:83
uint32_t AVCRC
Definition: crc.h:47
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() for allocating buffers and supports custom allocators.
Definition: avcodec.h:405
int strict_std_compliance
strictly follow the standard (MPEG-4, ...).
Definition: avcodec.h:1720
AVCodec ff_png_decoder
Predicted.
Definition: avutil.h:275
#define UNROLL_FILTER(op)
Definition: pngdec.c:238
#define MNGSIG
Definition: png.h:48