tiff.c
1 /*
2  * Copyright (c) 2006 Konstantin Shishkov
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * TIFF image decoder
24  * @author Konstantin Shishkov
25  */
26 
27 #include "config.h"
28 #if CONFIG_ZLIB
29 #include <zlib.h>
30 #endif
31 #if CONFIG_LZMA
32 #define LZMA_API_STATIC
33 #include <lzma.h>
34 #endif
35 
36 #include "libavutil/attributes.h"
37 #include "libavutil/avstring.h"
38 #include "libavutil/error.h"
39 #include "libavutil/intreadwrite.h"
40 #include "libavutil/imgutils.h"
41 #include "libavutil/opt.h"
42 #include "avcodec.h"
43 #include "bytestream.h"
44 #include "faxcompr.h"
45 #include "internal.h"
46 #include "lzw.h"
47 #include "mathops.h"
48 #include "tiff.h"
49 #include "tiff_data.h"
50 #include "mjpegdec.h"
51 #include "thread.h"
52 #include "get_bits.h"
53 
54 typedef struct TiffContext {
55  AVClass *class;
56  AVCodecContext *avctx;
57  GetByteContext gb;
58 
59  /* JPEG decoding for DNG */
60  AVCodecContext *avctx_mjpeg; // wrapper context for MJPEG
61  AVPacket *jpkt; // encoded JPEG tile
62  AVFrame *jpgframe; // decoded JPEG tile
63 
64  int get_subimage;
65  uint16_t get_page;
66  int get_thumbnail;
67 
68  enum TiffType tiff_type;
69  int width, height;
70  unsigned int bpp, bppcount;
71  uint32_t palette[256];
72  int palette_is_set;
73  int le;
74  enum TiffCompr compr;
75  enum TiffPhotometric photometric;
76  int planar;
77  int subsampling[2];
78  int fax_opts;
79  int predictor;
80  int fill_order;
81  uint32_t res[4];
82  int is_thumbnail;
83  unsigned last_tag;
84 
85  int is_bayer;
86  uint8_t pattern[4];
87  unsigned black_level;
88  unsigned white_level;
89  uint16_t dng_lut[65536];
90 
91  uint32_t sub_ifd;
92  uint16_t cur_page;
93 
94  int strips, rps, sstype;
95  int sot;
96  int stripsizesoff, stripsize, stripoff, strippos;
97  LZWState *lzw;
98 
99  /* Tile support */
100  int is_tiled;
101  int tile_byte_counts_offset, tile_offsets_offset;
102  int tile_width, tile_length;
103  int tile_count;
104 
105  int is_jpeg;
106 
107  uint8_t *deinvert_buf;
108  int deinvert_buf_size;
109  uint8_t *yuv_line;
110  unsigned int yuv_line_size;
111  uint8_t *fax_buffer;
112  unsigned int fax_buffer_size;
113 
114  int geotag_count;
115  TiffGeoTag *geotags;
116 } TiffContext;
117 
118 static void tiff_set_type(TiffContext *s, enum TiffType tiff_type) {
119  if (s->tiff_type < tiff_type) // Prioritize higher-valued entries
120  s->tiff_type = tiff_type;
121 }
122 
123 static void free_geotags(TiffContext *const s)
124 {
125  int i;
126  for (i = 0; i < s->geotag_count; i++) {
127  if (s->geotags[i].val)
128  av_freep(&s->geotags[i].val);
129  }
130  av_freep(&s->geotags);
131  s->geotag_count = 0;
132 }
133 
134 #define RET_GEOKEY(TYPE, array, element)\
135  if (key >= TIFF_##TYPE##_KEY_ID_OFFSET &&\
136  key - TIFF_##TYPE##_KEY_ID_OFFSET < FF_ARRAY_ELEMS(tiff_##array##_name_type_map))\
137  return tiff_##array##_name_type_map[key - TIFF_##TYPE##_KEY_ID_OFFSET].element;
138 
139 static const char *get_geokey_name(int key)
140 {
141  RET_GEOKEY(VERT, vert, name);
142  RET_GEOKEY(PROJ, proj, name);
143  RET_GEOKEY(GEOG, geog, name);
144  RET_GEOKEY(CONF, conf, name);
145 
146  return NULL;
147 }
148 
149 static int get_geokey_type(int key)
150 {
151  RET_GEOKEY(VERT, vert, type);
152  RET_GEOKEY(PROJ, proj, type);
153  RET_GEOKEY(GEOG, geog, type);
154  RET_GEOKEY(CONF, conf, type);
155 
156  return AVERROR_INVALIDDATA;
157 }
158 
159 static int cmp_id_key(const void *id, const void *k)
160 {
161  return *(const int*)id - ((const TiffGeoTagKeyName*)k)->key;
162 }
163 
164 static const char *search_keyval(const TiffGeoTagKeyName *keys, int n, int id)
165 {
166  TiffGeoTagKeyName *r = bsearch(&id, keys, n, sizeof(keys[0]), cmp_id_key);
167  if(r)
168  return r->name;
169 
170  return NULL;
171 }
172 
173 static char *get_geokey_val(int key, int val)
174 {
175  char *ap;
176 
177  if (val == TIFF_GEO_KEY_UNDEFINED)
178  return av_strdup("undefined");
179  if (val == TIFF_GEO_KEY_USER_DEFINED)
180  return av_strdup("User-Defined");
181 
182 #define RET_GEOKEY_VAL(TYPE, array)\
183  if (val >= TIFF_##TYPE##_OFFSET &&\
184  val - TIFF_##TYPE##_OFFSET < FF_ARRAY_ELEMS(tiff_##array##_codes))\
185  return av_strdup(tiff_##array##_codes[val - TIFF_##TYPE##_OFFSET]);
186 
187  switch (key) {
188  case TIFF_GT_MODEL_TYPE_GEOKEY:
189  RET_GEOKEY_VAL(GT_MODEL_TYPE, gt_model_type);
190  break;
191  case TIFF_GT_RASTER_TYPE_GEOKEY:
192  RET_GEOKEY_VAL(GT_RASTER_TYPE, gt_raster_type);
193  break;
194  case TIFF_GEOG_LINEAR_UNITS_GEOKEY:
195  case TIFF_PROJ_LINEAR_UNITS_GEOKEY:
196  case TIFF_VERTICAL_UNITS_GEOKEY:
197  RET_GEOKEY_VAL(LINEAR_UNIT, linear_unit);
198  break;
199  case TIFF_GEOG_ANGULAR_UNITS_GEOKEY:
200  case TIFF_GEOG_AZIMUTH_UNITS_GEOKEY:
201  RET_GEOKEY_VAL(ANGULAR_UNIT, angular_unit);
202  break;
203  case TIFF_GEOGRAPHIC_TYPE_GEOKEY:
204  RET_GEOKEY_VAL(GCS_TYPE, gcs_type);
205  RET_GEOKEY_VAL(GCSE_TYPE, gcse_type);
206  break;
207  case TIFF_GEOG_GEODETIC_DATUM_GEOKEY:
208  RET_GEOKEY_VAL(GEODETIC_DATUM, geodetic_datum);
209  RET_GEOKEY_VAL(GEODETIC_DATUM_E, geodetic_datum_e);
210  break;
211  case TIFF_GEOG_ELLIPSOID_GEOKEY:
212  RET_GEOKEY_VAL(ELLIPSOID, ellipsoid);
213  break;
214  case TIFF_GEOG_PRIME_MERIDIAN_GEOKEY:
215  RET_GEOKEY_VAL(PRIME_MERIDIAN, prime_meridian);
216  break;
217  case TIFF_PROJECTED_CS_TYPE_GEOKEY:
218  ap = av_strdup(search_keyval(tiff_proj_cs_type_codes, FF_ARRAY_ELEMS(tiff_proj_cs_type_codes), val));
219  if(ap) return ap;
220  break;
221  case TIFF_PROJECTION_GEOKEY:
222  ap = av_strdup(search_keyval(tiff_projection_codes, FF_ARRAY_ELEMS(tiff_projection_codes), val));
223  if(ap) return ap;
224  break;
225  case TIFF_PROJ_COORD_TRANS_GEOKEY:
226  RET_GEOKEY_VAL(COORD_TRANS, coord_trans);
227  break;
228  case TIFF_VERTICAL_CS_TYPE_GEOKEY:
229  RET_GEOKEY_VAL(VERT_CS, vert_cs);
230  RET_GEOKEY_VAL(ORTHO_VERT_CS, ortho_vert_cs);
231  break;
232 
233  }
234 
235  ap = av_malloc(14);
236  if (ap)
237  snprintf(ap, 14, "Unknown-%d", val);
238  return ap;
239 }
240 
241 static char *doubles2str(double *dp, int count, const char *sep)
242 {
243  int i;
244  char *ap, *ap0;
245  uint64_t component_len;
246  if (!sep) sep = ", ";
247  component_len = 24LL + strlen(sep);
248  if (count >= (INT_MAX - 1)/component_len)
249  return NULL;
250  ap = av_malloc(component_len * count + 1);
251  if (!ap)
252  return NULL;
253  ap0 = ap;
254  ap[0] = '\0';
255  for (i = 0; i < count; i++) {
256  unsigned l = snprintf(ap, component_len, "%.15g%s", dp[i], sep);
257  if(l >= component_len) {
258  av_free(ap0);
259  return NULL;
260  }
261  ap += l;
262  }
263  ap0[strlen(ap0) - strlen(sep)] = '\0';
264  return ap0;
265 }
266 
267 static int add_metadata(int count, int type,
268  const char *name, const char *sep, TiffContext *s, AVFrame *frame)
269 {
270  switch(type) {
271  case TIFF_DOUBLE: return ff_tadd_doubles_metadata(count, name, sep, &s->gb, s->le, &frame->metadata);
272  case TIFF_SHORT : return ff_tadd_shorts_metadata(count, name, sep, &s->gb, s->le, 0, &frame->metadata);
273  case TIFF_STRING: return ff_tadd_string_metadata(count, name, &s->gb, s->le, &frame->metadata);
274  default : return AVERROR_INVALIDDATA;
275  };
276 }
277 
278 /**
279  * Map stored raw sensor values into linear reference values (see: DNG Specification - Chapter 5)
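 * Each stored sample is linearized as
 *   out = clip_uint16((LUT[in] - black_level) * scale_factor * 65535)
 * where scale_factor = 1.0 / (white_level - black_level) is precomputed by the
 * caller (see dng_blit()).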
280  */
281 static uint16_t av_always_inline dng_process_color16(uint16_t value,
282  const uint16_t *lut,
283  uint16_t black_level,
284  float scale_factor)
285 {
286  float value_norm;
287 
288  // Lookup table lookup
289  if (lut)
290  value = lut[value];
291 
292  // Black level subtraction
293  value = av_clip_uint16_c((unsigned)value - black_level);
294 
295  // Color scaling
296  value_norm = (float)value * scale_factor;
297 
298  value = av_clip_uint16_c(value_norm * 65535);
299 
300  return value;
301 }
302 
303 static uint16_t av_always_inline dng_process_color8(uint16_t value,
304  const uint16_t *lut,
305  uint16_t black_level,
306  float scale_factor)
307 {
308  return dng_process_color16(value, lut, black_level, scale_factor) >> 8;
309 }
310 
311 static void av_always_inline dng_blit(TiffContext *s, uint8_t *dst, int dst_stride,
312  const uint8_t *src, int src_stride, int width, int height,
313  int is_single_comp, int is_u16)
314 {
315  int line, col;
316  float scale_factor;
317 
318  scale_factor = 1.0f / (s->white_level - s->black_level);
319 
320  if (is_single_comp) {
321  if (!is_u16)
322  return; /* <= 8bpp unsupported */
323 
324  /* Image is double the width and half the height we need, each row comprises 2 rows of the output
325  (split vertically in the middle). */
326  for (line = 0; line < height / 2; line++) {
327  uint16_t *dst_u16 = (uint16_t *)dst;
328  uint16_t *src_u16 = (uint16_t *)src;
329 
330  /* Blit first half of input row to initial row of output */
331  for (col = 0; col < width; col++)
332  *dst_u16++ = dng_process_color16(*src_u16++, s->dng_lut, s->black_level, scale_factor);
333 
334  /* Advance the destination pointer by a row (source pointer remains in the same place) */
335  dst += dst_stride * sizeof(uint16_t);
336  dst_u16 = (uint16_t *)dst;
337 
338  /* Blit second half of input row to next row of output */
339  for (col = 0; col < width; col++)
340  *dst_u16++ = dng_process_color16(*src_u16++, s->dng_lut, s->black_level, scale_factor);
341 
342  dst += dst_stride * sizeof(uint16_t);
343  src += src_stride * sizeof(uint16_t);
344  }
345  } else {
346  /* Input and output image are the same size and the MJpeg decoder has done per-component
347  deinterleaving, so blitting here is straightforward. */
348  if (is_u16) {
349  for (line = 0; line < height; line++) {
350  uint16_t *dst_u16 = (uint16_t *)dst;
351  uint16_t *src_u16 = (uint16_t *)src;
352 
353  for (col = 0; col < width; col++)
354  *dst_u16++ = dng_process_color16(*src_u16++, s->dng_lut, s->black_level, scale_factor);
355 
356  dst += dst_stride * sizeof(uint16_t);
357  src += src_stride * sizeof(uint16_t);
358  }
359  } else {
360  for (line = 0; line < height; line++) {
361  uint8_t *dst_u8 = dst;
362  const uint8_t *src_u8 = src;
363 
364  for (col = 0; col < width; col++)
365  *dst_u8++ = dng_process_color8(*src_u8++, s->dng_lut, s->black_level, scale_factor);
366 
367  dst += dst_stride;
368  src += src_stride;
369  }
370  }
371  }
372 }
373 
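/* Expand packed samples to one sample per output element: for 1, 2 and 4 bpp the
 * input is either the buffer 'src' (usePtr != 0) or the replicated constant byte
 * 'c'; for 10, 12 and 14 bpp samples are read bit-wise from 'src' into 16-bit
 * output (left-shifted unless decoding DNG). */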
374 static void av_always_inline horizontal_fill(TiffContext *s,
375  unsigned int bpp, uint8_t* dst,
376  int usePtr, const uint8_t *src,
377  uint8_t c, int width, int offset)
378 {
379  switch (bpp) {
380  case 1:
381  while (--width >= 0) {
382  dst[(width+offset)*8+7] = (usePtr ? src[width] : c) & 0x1;
383  dst[(width+offset)*8+6] = (usePtr ? src[width] : c) >> 1 & 0x1;
384  dst[(width+offset)*8+5] = (usePtr ? src[width] : c) >> 2 & 0x1;
385  dst[(width+offset)*8+4] = (usePtr ? src[width] : c) >> 3 & 0x1;
386  dst[(width+offset)*8+3] = (usePtr ? src[width] : c) >> 4 & 0x1;
387  dst[(width+offset)*8+2] = (usePtr ? src[width] : c) >> 5 & 0x1;
388  dst[(width+offset)*8+1] = (usePtr ? src[width] : c) >> 6 & 0x1;
389  dst[(width+offset)*8+0] = (usePtr ? src[width] : c) >> 7;
390  }
391  break;
392  case 2:
393  while (--width >= 0) {
394  dst[(width+offset)*4+3] = (usePtr ? src[width] : c) & 0x3;
395  dst[(width+offset)*4+2] = (usePtr ? src[width] : c) >> 2 & 0x3;
396  dst[(width+offset)*4+1] = (usePtr ? src[width] : c) >> 4 & 0x3;
397  dst[(width+offset)*4+0] = (usePtr ? src[width] : c) >> 6;
398  }
399  break;
400  case 4:
401  while (--width >= 0) {
402  dst[(width+offset)*2+1] = (usePtr ? src[width] : c) & 0xF;
403  dst[(width+offset)*2+0] = (usePtr ? src[width] : c) >> 4;
404  }
405  break;
406  case 10:
407  case 12:
408  case 14: {
409  uint16_t *dst16 = (uint16_t *)dst;
410  int is_dng = (s->tiff_type == TIFF_TYPE_DNG || s->tiff_type == TIFF_TYPE_CINEMADNG);
411  uint8_t shift = is_dng ? 0 : 16 - bpp;
412  GetBitContext gb;
413 
414  init_get_bits8(&gb, src, width);
415  for (int i = 0; i < s->width; i++) {
416  dst16[i] = get_bits(&gb, bpp) << shift;
417  }
418  }
419  break;
420  default:
421  if (usePtr) {
422  memcpy(dst + offset, src, width);
423  } else {
424  memset(dst + offset, c, width);
425  }
426  }
427 }
428 
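/* Reverse the bit order of every byte in 'src' into s->deinvert_buf; used when
 * FillOrder is 2 (least significant bit first). */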
429 static int deinvert_buffer(TiffContext *s, const uint8_t *src, int size)
430 {
431  int i;
432 
433  av_fast_padded_malloc(&s->deinvert_buf, &s->deinvert_buf_size, size);
434  if (!s->deinvert_buf)
435  return AVERROR(ENOMEM);
436  for (i = 0; i < size; i++)
437  s->deinvert_buf[i] = ff_reverse[src[i]];
438 
439  return 0;
440 }
441 
442 static void unpack_gray(TiffContext *s, AVFrame *p,
443  const uint8_t *src, int lnum, int width, int bpp)
444 {
445  GetBitContext gb;
446  uint16_t *dst = (uint16_t *)(p->data[0] + lnum * p->linesize[0]);
447 
448  init_get_bits8(&gb, src, width);
449 
450  for (int i = 0; i < s->width; i++) {
451  dst[i] = get_bits(&gb, bpp);
452  }
453 }
454 
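/* Scatter one packed YCbCr row into the planar output: each group of
 * subsampling[0] x subsampling[1] luma samples is followed by one U and one V
 * sample; edge groups are clamped to the image dimensions. */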
455 static void unpack_yuv(TiffContext *s, AVFrame *p,
456  const uint8_t *src, int lnum)
457 {
458  int i, j, k;
459  int w = (s->width - 1) / s->subsampling[0] + 1;
460  uint8_t *pu = &p->data[1][lnum / s->subsampling[1] * p->linesize[1]];
461  uint8_t *pv = &p->data[2][lnum / s->subsampling[1] * p->linesize[2]];
462  if (s->width % s->subsampling[0] || s->height % s->subsampling[1]) {
463  for (i = 0; i < w; i++) {
464  for (j = 0; j < s->subsampling[1]; j++)
465  for (k = 0; k < s->subsampling[0]; k++)
466  p->data[0][FFMIN(lnum + j, s->height-1) * p->linesize[0] +
467  FFMIN(i * s->subsampling[0] + k, s->width-1)] = *src++;
468  *pu++ = *src++;
469  *pv++ = *src++;
470  }
471  }else{
472  for (i = 0; i < w; i++) {
473  for (j = 0; j < s->subsampling[1]; j++)
474  for (k = 0; k < s->subsampling[0]; k++)
475  p->data[0][(lnum + j) * p->linesize[0] +
476  i * s->subsampling[0] + k] = *src++;
477  *pu++ = *src++;
478  *pv++ = *src++;
479  }
480  }
481 }
482 
483 #if CONFIG_ZLIB
484 static int tiff_uncompress(uint8_t *dst, unsigned long *len, const uint8_t *src,
485  int size)
486 {
487  z_stream zstream = { 0 };
488  int zret;
489 
490  zstream.next_in = src;
491  zstream.avail_in = size;
492  zstream.next_out = dst;
493  zstream.avail_out = *len;
494  zret = inflateInit(&zstream);
495  if (zret != Z_OK) {
496  av_log(NULL, AV_LOG_ERROR, "Inflate init error: %d\n", zret);
497  return zret;
498  }
499  zret = inflate(&zstream, Z_SYNC_FLUSH);
500  inflateEnd(&zstream);
501  *len = zstream.total_out;
502  return zret == Z_STREAM_END ? Z_OK : zret;
503 }
504 
505 static int tiff_unpack_zlib(TiffContext *s, AVFrame *p, uint8_t *dst, int stride,
506  const uint8_t *src, int size, int width, int lines,
507  int strip_start, int is_yuv)
508 {
509  uint8_t *zbuf;
510  unsigned long outlen;
511  int ret, line;
512  outlen = width * lines;
513  zbuf = av_malloc(outlen);
514  if (!zbuf)
515  return AVERROR(ENOMEM);
516  if (s->fill_order) {
517  if ((ret = deinvert_buffer(s, src, size)) < 0) {
518  av_free(zbuf);
519  return ret;
520  }
521  src = s->deinvert_buf;
522  }
523  ret = tiff_uncompress(zbuf, &outlen, src, size);
524  if (ret != Z_OK) {
525  av_log(s->avctx, AV_LOG_ERROR,
526  "Uncompressing failed (%lu of %lu) with error %d\n", outlen,
527  (unsigned long)width * lines, ret);
528  av_free(zbuf);
529  return AVERROR_UNKNOWN;
530  }
531  src = zbuf;
532  for (line = 0; line < lines; line++) {
533  if (s->bpp < 8 && s->avctx->pix_fmt == AV_PIX_FMT_PAL8) {
534  horizontal_fill(s, s->bpp, dst, 1, src, 0, width, 0);
535  } else {
536  memcpy(dst, src, width);
537  }
538  if (is_yuv) {
539  unpack_yuv(s, p, dst, strip_start + line);
540  line += s->subsampling[1] - 1;
541  }
542  dst += stride;
543  src += width;
544  }
545  av_free(zbuf);
546  return 0;
547 }
548 #endif
549 
550 #if CONFIG_LZMA
551 static int tiff_uncompress_lzma(uint8_t *dst, uint64_t *len, const uint8_t *src,
552  int size)
553 {
554  lzma_stream stream = LZMA_STREAM_INIT;
555  lzma_ret ret;
556 
557  stream.next_in = (uint8_t *)src;
558  stream.avail_in = size;
559  stream.next_out = dst;
560  stream.avail_out = *len;
561  ret = lzma_stream_decoder(&stream, UINT64_MAX, 0);
562  if (ret != LZMA_OK) {
563  av_log(NULL, AV_LOG_ERROR, "LZMA init error: %d\n", ret);
564  return ret;
565  }
566  ret = lzma_code(&stream, LZMA_RUN);
567  lzma_end(&stream);
568  *len = stream.total_out;
569  return ret == LZMA_STREAM_END ? LZMA_OK : ret;
570 }
571 
572 static int tiff_unpack_lzma(TiffContext *s, AVFrame *p, uint8_t *dst, int stride,
573  const uint8_t *src, int size, int width, int lines,
574  int strip_start, int is_yuv)
575 {
576  uint64_t outlen = width * (uint64_t)lines;
577  int ret, line;
578  uint8_t *buf = av_malloc(outlen);
579  if (!buf)
580  return AVERROR(ENOMEM);
581  if (s->fill_order) {
582  if ((ret = deinvert_buffer(s, src, size)) < 0) {
583  av_free(buf);
584  return ret;
585  }
586  src = s->deinvert_buf;
587  }
588  ret = tiff_uncompress_lzma(buf, &outlen, src, size);
589  if (ret != LZMA_OK) {
590  av_log(s->avctx, AV_LOG_ERROR,
591  "Uncompressing failed (%"PRIu64" of %"PRIu64") with error %d\n", outlen,
592  (uint64_t)width * lines, ret);
593  av_free(buf);
594  return AVERROR_UNKNOWN;
595  }
596  src = buf;
597  for (line = 0; line < lines; line++) {
598  if (s->bpp < 8 && s->avctx->pix_fmt == AV_PIX_FMT_PAL8) {
599  horizontal_fill(s, s->bpp, dst, 1, src, 0, width, 0);
600  } else {
601  memcpy(dst, src, width);
602  }
603  if (is_yuv) {
604  unpack_yuv(s, p, dst, strip_start + line);
605  line += s->subsampling[1] - 1;
606  }
607  dst += stride;
608  src += width;
609  }
610  av_free(buf);
611  return 0;
612 }
613 #endif
614 
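/* Decode a CCITT G3/G4/RLE compressed strip: the input is copied (bit-reversed
 * when FillOrder is 2) into a padded scratch buffer and passed to
 * ff_ccitt_unpack(). */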
615 static int tiff_unpack_fax(TiffContext *s, uint8_t *dst, int stride,
616  const uint8_t *src, int size, int width, int lines)
617 {
618  int i, ret = 0;
619  int line;
620  uint8_t *src2;
621 
622  av_fast_padded_malloc(&s->fax_buffer, &s->fax_buffer_size, size);
623  src2 = s->fax_buffer;
624 
625  if (!src2) {
626  av_log(s->avctx, AV_LOG_ERROR,
627  "Error allocating temporary buffer\n");
628  return AVERROR(ENOMEM);
629  }
630 
631  if (!s->fill_order) {
632  memcpy(src2, src, size);
633  } else {
634  for (i = 0; i < size; i++)
635  src2[i] = ff_reverse[src[i]];
636  }
637  memset(src2 + size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
638  ret = ff_ccitt_unpack(s->avctx, src2, size, dst, lines, stride,
639  s->compr, s->fax_opts);
640  if (s->bpp < 8 && s->avctx->pix_fmt == AV_PIX_FMT_PAL8)
641  for (line = 0; line < lines; line++) {
642  horizontal_fill(s, s->bpp, dst, 1, dst, 0, width, 0);
643  dst += stride;
644  }
645  return ret;
646 }
647 
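/* Decode one JPEG-compressed DNG tile or strip with the wrapped MJPEG decoder and
 * blit the result into 'frame' at (dst_x, dst_y); w and h give the tile size. */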
648 static int dng_decode_jpeg(AVCodecContext *avctx, AVFrame *frame,
649  int tile_byte_count, int dst_x, int dst_y, int w, int h)
650 {
651  TiffContext *s = avctx->priv_data;
652  uint8_t *dst_data, *src_data;
653  uint32_t dst_offset; /* offset from dst buffer in pixels */
654  int is_single_comp, is_u16, pixel_size;
655  int ret;
656 
657  if (tile_byte_count < 0 || tile_byte_count > bytestream2_get_bytes_left(&s->gb))
658  return AVERROR_INVALIDDATA;
659 
660  /* Prepare a packet and send to the MJPEG decoder */
661  av_packet_unref(s->jpkt);
662  s->jpkt->data = (uint8_t*)s->gb.buffer;
663  s->jpkt->size = tile_byte_count;
664 
665  if (s->is_bayer) {
666  MJpegDecodeContext *mjpegdecctx = s->avctx_mjpeg->priv_data;
667  /* We have to set this information here, there is no way to know if a given JPEG is a DNG-embedded
668  image or not from its own data (and we need that information when decoding it). */
669  mjpegdecctx->bayer = 1;
670  }
671 
672  ret = avcodec_send_packet(s->avctx_mjpeg, s->jpkt);
673  if (ret < 0) {
674  av_log(avctx, AV_LOG_ERROR, "Error submitting a packet for decoding\n");
675  return ret;
676  }
677 
678  ret = avcodec_receive_frame(s->avctx_mjpeg, s->jpgframe);
679  if (ret < 0) {
680  av_log(avctx, AV_LOG_ERROR, "JPEG decoding error: %s.\n", av_err2str(ret));
681 
682  /* Normally skip, error if explode */
683  if (avctx->err_recognition & AV_EF_EXPLODE)
684  return AVERROR_INVALIDDATA;
685  else
686  return 0;
687  }
688 
689  is_u16 = (s->bpp > 8);
690 
691  /* Copy the outputted tile's pixels from 'jpgframe' to 'frame' (final buffer) */
692 
693  if (s->jpgframe->width != s->avctx_mjpeg->width ||
694  s->jpgframe->height != s->avctx_mjpeg->height ||
695  s->jpgframe->format != s->avctx_mjpeg->pix_fmt)
696  return AVERROR_INVALIDDATA;
697 
698  /* See dng_blit for explanation */
699  if (s->avctx_mjpeg->width == w * 2 &&
700  s->avctx_mjpeg->height == h / 2 &&
701  s->avctx_mjpeg->pix_fmt == AV_PIX_FMT_GRAY16LE) {
702  is_single_comp = 1;
703  } else if (s->avctx_mjpeg->width >= w &&
704  s->avctx_mjpeg->height >= h &&
705  s->avctx_mjpeg->pix_fmt == (is_u16 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8)
706  ) {
707  is_single_comp = 0;
708  } else
709  return AVERROR_INVALIDDATA;
710 
711  pixel_size = (is_u16 ? sizeof(uint16_t) : sizeof(uint8_t));
712 
713  if (is_single_comp && !is_u16) {
714  av_log(s->avctx, AV_LOG_ERROR, "DNGs with bpp <= 8 and 1 component are unsupported\n");
715  av_frame_unref(s->jpgframe);
716  return AVERROR_PATCHWELCOME;
717  }
718 
719  dst_offset = dst_x + frame->linesize[0] * dst_y / pixel_size;
720  dst_data = frame->data[0] + dst_offset * pixel_size;
721  src_data = s->jpgframe->data[0];
722 
723  dng_blit(s,
724  dst_data,
725  frame->linesize[0] / pixel_size,
726  src_data,
727  s->jpgframe->linesize[0] / pixel_size,
728  w,
729  h,
730  is_single_comp,
731  is_u16);
732 
733  av_frame_unref(s->jpgframe);
734 
735  return 0;
736 }
737 
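/* Decompress and post-process a single strip into 'dst', dispatching on s->compr
 * (raw, PackBits, LZW, Deflate, LZMA, CCITT fax or JPEG-in-DNG). */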
738 static int tiff_unpack_strip(TiffContext *s, AVFrame *p, uint8_t *dst, int stride,
739  const uint8_t *src, int size, int strip_start, int lines)
740 {
741  PutByteContext pb;
742  int c, line, pixels, code, ret;
743  const uint8_t *ssrc = src;
744  int width = ((s->width * s->bpp) + 7) >> 3;
745  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(p->format);
746  int is_yuv = !(desc->flags & AV_PIX_FMT_FLAG_RGB) &&
747  (desc->flags & AV_PIX_FMT_FLAG_PLANAR) &&
748  desc->nb_components >= 3;
749  int is_dng;
750 
751  if (s->planar)
752  width /= s->bppcount;
753 
754  if (size <= 0)
755  return AVERROR_INVALIDDATA;
756 
757  if (is_yuv) {
758  int bytes_per_row = (((s->width - 1) / s->subsampling[0] + 1) * s->bpp *
759  s->subsampling[0] * s->subsampling[1] + 7) >> 3;
760  av_fast_padded_malloc(&s->yuv_line, &s->yuv_line_size, bytes_per_row);
761  if (s->yuv_line == NULL) {
762  av_log(s->avctx, AV_LOG_ERROR, "Not enough memory\n");
763  return AVERROR(ENOMEM);
764  }
765  dst = s->yuv_line;
766  stride = 0;
767 
768  width = (s->width - 1) / s->subsampling[0] + 1;
769  width = width * s->subsampling[0] * s->subsampling[1] + 2*width;
770  av_assert0(width <= bytes_per_row);
771  av_assert0(s->bpp == 24);
772  }
773  if (s->is_bayer) {
774  av_assert0(width == (s->bpp * s->width + 7) >> 3);
775  }
776  if (p->format == AV_PIX_FMT_GRAY12) {
777  av_fast_padded_malloc(&s->yuv_line, &s->yuv_line_size, width);
778  if (s->yuv_line == NULL) {
779  av_log(s->avctx, AV_LOG_ERROR, "Not enough memory\n");
780  return AVERROR(ENOMEM);
781  }
782  dst = s->yuv_line;
783  stride = 0;
784  }
785 
786  if (s->compr == TIFF_DEFLATE || s->compr == TIFF_ADOBE_DEFLATE) {
787 #if CONFIG_ZLIB
788  return tiff_unpack_zlib(s, p, dst, stride, src, size, width, lines,
789  strip_start, is_yuv);
790 #else
791  av_log(s->avctx, AV_LOG_ERROR,
792  "zlib support not enabled, "
793  "deflate compression not supported\n");
794  return AVERROR(ENOSYS);
795 #endif
796  }
797  if (s->compr == TIFF_LZMA) {
798 #if CONFIG_LZMA
799  return tiff_unpack_lzma(s, p, dst, stride, src, size, width, lines,
800  strip_start, is_yuv);
801 #else
802  av_log(s->avctx, AV_LOG_ERROR,
803  "LZMA support not enabled\n");
804  return AVERROR(ENOSYS);
805 #endif
806  }
807  if (s->compr == TIFF_LZW) {
808  if (s->fill_order) {
809  if ((ret = deinvert_buffer(s, src, size)) < 0)
810  return ret;
811  ssrc = src = s->deinvert_buf;
812  }
813  if (size > 1 && !src[0] && (src[1]&1)) {
814  av_log(s->avctx, AV_LOG_ERROR, "Old style LZW is unsupported\n");
815  }
816  if ((ret = ff_lzw_decode_init(s->lzw, 8, src, size, FF_LZW_TIFF)) < 0) {
817  av_log(s->avctx, AV_LOG_ERROR, "Error initializing LZW decoder\n");
818  return ret;
819  }
820  for (line = 0; line < lines; line++) {
821  pixels = ff_lzw_decode(s->lzw, dst, width);
822  if (pixels < width) {
823  av_log(s->avctx, AV_LOG_ERROR, "Decoded only %i bytes of %i\n",
824  pixels, width);
825  return AVERROR_INVALIDDATA;
826  }
827  if (s->bpp < 8 && s->avctx->pix_fmt == AV_PIX_FMT_PAL8)
828  horizontal_fill(s, s->bpp, dst, 1, dst, 0, width, 0);
829  if (is_yuv) {
830  unpack_yuv(s, p, dst, strip_start + line);
831  line += s->subsampling[1] - 1;
832  } else if (p->format == AV_PIX_FMT_GRAY12) {
833  unpack_gray(s, p, dst, strip_start + line, width, s->bpp);
834  }
835  dst += stride;
836  }
837  return 0;
838  }
839  if (s->compr == TIFF_CCITT_RLE ||
840  s->compr == TIFF_G3 ||
841  s->compr == TIFF_G4) {
842  if (is_yuv || p->format == AV_PIX_FMT_GRAY12)
843  return AVERROR_INVALIDDATA;
844 
845  return tiff_unpack_fax(s, dst, stride, src, size, width, lines);
846  }
847 
848  bytestream2_init(&s->gb, src, size);
849  bytestream2_init_writer(&pb, dst, is_yuv ? s->yuv_line_size : (stride * lines));
850 
851  is_dng = (s->tiff_type == TIFF_TYPE_DNG || s->tiff_type == TIFF_TYPE_CINEMADNG);
852 
853  /* Decode JPEG-encoded DNGs with strips */
854  if (s->compr == TIFF_NEWJPEG && is_dng) {
855  if (s->strips > 1) {
856  av_log(s->avctx, AV_LOG_ERROR, "More than one DNG JPEG strips unsupported\n");
857  return AVERROR_PATCHWELCOME;
858  }
859  if ((ret = dng_decode_jpeg(s->avctx, p, s->stripsize, 0, 0, s->width, s->height)) < 0)
860  return ret;
861  return 0;
862  }
863 
864  if (is_dng && stride == 0)
865  return AVERROR_INVALIDDATA;
866 
867  for (line = 0; line < lines; line++) {
868  if (src - ssrc > size) {
869  av_log(s->avctx, AV_LOG_ERROR, "Source data overread\n");
870  return AVERROR_INVALIDDATA;
871  }
872 
873  if (bytestream2_get_bytes_left(&s->gb) == 0 || bytestream2_get_eof(&pb))
874  break;
875  bytestream2_seek_p(&pb, stride * line, SEEK_SET);
876  switch (s->compr) {
877  case TIFF_RAW:
878  if (ssrc + size - src < width)
879  return AVERROR_INVALIDDATA;
880 
881  if (!s->fill_order) {
882  horizontal_fill(s, s->bpp * (s->avctx->pix_fmt == AV_PIX_FMT_PAL8 || s->is_bayer),
883  dst, 1, src, 0, width, 0);
884  } else {
885  int i;
886  for (i = 0; i < width; i++)
887  dst[i] = ff_reverse[src[i]];
888  }
889 
890  /* Color processing for DNG images with uncompressed strips (non-tiled) */
891  if (is_dng) {
892  int is_u16, pixel_size_bytes, pixel_size_bits, elements;
893 
894  is_u16 = (s->bpp / s->bppcount > 8);
895  pixel_size_bits = (is_u16 ? 16 : 8);
896  pixel_size_bytes = (is_u16 ? sizeof(uint16_t) : sizeof(uint8_t));
897 
898  elements = width / pixel_size_bytes * pixel_size_bits / s->bpp * s->bppcount; // need to account for [1, 16] bpp
899  av_assert0 (elements * pixel_size_bytes <= FFABS(stride));
900  dng_blit(s,
901  dst,
902  0, // no stride, only 1 line
903  dst,
904  0, // no stride, only 1 line
905  elements,
906  1,
907  0, // single-component variation is only present in JPEG-encoded DNGs
908  is_u16);
909  }
910 
911  src += width;
912  break;
913  case TIFF_PACKBITS:
914  for (pixels = 0; pixels < width;) {
915  if (ssrc + size - src < 2) {
916  av_log(s->avctx, AV_LOG_ERROR, "Read went out of bounds\n");
917  return AVERROR_INVALIDDATA;
918  }
919  code = s->fill_order ? (int8_t) ff_reverse[*src++]: (int8_t) *src++;
920  if (code >= 0) {
921  code++;
922  if (pixels + code > width ||
923  ssrc + size - src < code) {
924  av_log(s->avctx, AV_LOG_ERROR,
925  "Copy went out of bounds\n");
926  return AVERROR_INVALIDDATA;
927  }
928  horizontal_fill(s, s->bpp * (s->avctx->pix_fmt == AV_PIX_FMT_PAL8),
929  dst, 1, src, 0, code, pixels);
930  src += code;
931  pixels += code;
932  } else if (code != -128) { // -127..-1
933  code = (-code) + 1;
934  if (pixels + code > width) {
935  av_log(s->avctx, AV_LOG_ERROR,
936  "Run went out of bounds\n");
937  return AVERROR_INVALIDDATA;
938  }
939  c = *src++;
940  horizontal_fill(s, s->bpp * (s->avctx->pix_fmt == AV_PIX_FMT_PAL8),
941  dst, 0, NULL, c, code, pixels);
942  pixels += code;
943  }
944  }
945  if (s->fill_order) {
946  int i;
947  for (i = 0; i < width; i++)
948  dst[i] = ff_reverse[dst[i]];
949  }
950  break;
951  }
952  if (is_yuv) {
953  unpack_yuv(s, p, dst, strip_start + line);
954  line += s->subsampling[1] - 1;
955  } else if (p->format == AV_PIX_FMT_GRAY12) {
956  unpack_gray(s, p, dst, strip_start + line, width, s->bpp);
957  }
958  dst += stride;
959  }
960  return 0;
961 }
962 
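/* Walk the TileOffsets/TileByteCounts tables and decode every JPEG-compressed
 * tile of a tiled DNG into its position in the output frame. */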
963 static int dng_decode_tiles(AVCodecContext *avctx, AVFrame *frame,
964  const AVPacket *avpkt)
965 {
966  TiffContext *s = avctx->priv_data;
967  int tile_idx;
968  int tile_offset_offset, tile_offset;
969  int tile_byte_count_offset, tile_byte_count;
970  int tile_count_x, tile_count_y;
971  int tile_width, tile_length;
972  int has_width_leftover, has_height_leftover;
973  int tile_x = 0, tile_y = 0;
974  int pos_x = 0, pos_y = 0;
975  int ret;
976 
977  has_width_leftover = (s->width % s->tile_width != 0);
978  has_height_leftover = (s->height % s->tile_length != 0);
979 
980  /* Calculate tile counts (round up) */
981  tile_count_x = (s->width + s->tile_width - 1) / s->tile_width;
982  tile_count_y = (s->height + s->tile_length - 1) / s->tile_length;
983 
984  /* Iterate over the number of tiles */
985  for (tile_idx = 0; tile_idx < s->tile_count; tile_idx++) {
986  tile_x = tile_idx % tile_count_x;
987  tile_y = tile_idx / tile_count_x;
988 
989  if (has_width_leftover && tile_x == tile_count_x - 1) // If on the right-most tile
990  tile_width = s->width % s->tile_width;
991  else
992  tile_width = s->tile_width;
993 
994  if (has_height_leftover && tile_y == tile_count_y - 1) // If on the bottom-most tile
995  tile_length = s->height % s->tile_length;
996  else
997  tile_length = s->tile_length;
998 
999  /* Read tile offset */
1000  tile_offset_offset = s->tile_offsets_offset + tile_idx * sizeof(int);
1001  bytestream2_seek(&s->gb, tile_offset_offset, SEEK_SET);
1002  tile_offset = ff_tget_long(&s->gb, s->le);
1003 
1004  /* Read tile byte size */
1005  tile_byte_count_offset = s->tile_byte_counts_offset + tile_idx * sizeof(int);
1006  bytestream2_seek(&s->gb, tile_byte_count_offset, SEEK_SET);
1007  tile_byte_count = ff_tget_long(&s->gb, s->le);
1008 
1009  /* Seek to tile data */
1010  bytestream2_seek(&s->gb, tile_offset, SEEK_SET);
1011 
1012  /* Decode JPEG tile and copy it in the reference frame */
1013  ret = dng_decode_jpeg(avctx, frame, tile_byte_count, pos_x, pos_y, tile_width, tile_length);
1014 
1015  if (ret < 0)
1016  return ret;
1017 
1018  /* Advance current positions */
1019  pos_x += tile_width;
1020  if (tile_x == tile_count_x - 1) { // If on the right edge
1021  pos_x = 0;
1022  pos_y += tile_length;
1023  }
1024  }
1025 
1026  /* Frame is ready to be output */
1027  frame->pict_type = AV_PICTURE_TYPE_I;
1028  frame->key_frame = 1;
1029 
1030  return avpkt->size;
1031 }
1032 
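/* Select the output pixel format from bpp, component count, planarity and Bayer
 * pattern, update the decoder dimensions and allocate the frame buffer. */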
1033 static int init_image(TiffContext *s, ThreadFrame *frame)
1034 {
1035  int ret;
1036  int create_gray_palette = 0;
1037 
1038  // make sure there is no aliasing in the following switch
1039  if (s->bpp >= 100 || s->bppcount >= 10) {
1040  av_log(s->avctx, AV_LOG_ERROR,
1041  "Unsupported image parameters: bpp=%d, bppcount=%d\n",
1042  s->bpp, s->bppcount);
1043  return AVERROR_INVALIDDATA;
1044  }
1045 
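 // The switch key packs the image parameters into a single integer:
 // is_bayer * 10000 + planar * 1000 + bpp * 10 + bppcount,
 // e.g. 8 bpp, 1 component, non-planar, non-Bayer gives 81.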
1046  switch (s->planar * 1000 + s->bpp * 10 + s->bppcount + s->is_bayer * 10000) {
1047  case 11:
1048  if (!s->palette_is_set) {
1049  s->avctx->pix_fmt = AV_PIX_FMT_MONOBLACK;
1050  break;
1051  }
1052  case 21:
1053  case 41:
1054  s->avctx->pix_fmt = AV_PIX_FMT_PAL8;
1055  if (!s->palette_is_set) {
1056  create_gray_palette = 1;
1057  }
1058  break;
1059  case 81:
1060  s->avctx->pix_fmt = s->palette_is_set ? AV_PIX_FMT_PAL8 : AV_PIX_FMT_GRAY8;
1061  break;
1062  case 121:
1063  s->avctx->pix_fmt = AV_PIX_FMT_GRAY12;
1064  break;
1065  case 10081:
1066  switch (AV_RL32(s->pattern)) {
1067  case 0x02010100:
1068  s->avctx->pix_fmt = AV_PIX_FMT_BAYER_RGGB8;
1069  break;
1070  case 0x00010102:
1071  s->avctx->pix_fmt = AV_PIX_FMT_BAYER_BGGR8;
1072  break;
1073  case 0x01000201:
1074  s->avctx->pix_fmt = AV_PIX_FMT_BAYER_GBRG8;
1075  break;
1076  case 0x01020001:
1077  s->avctx->pix_fmt = AV_PIX_FMT_BAYER_GRBG8;
1078  break;
1079  default:
1080  av_log(s->avctx, AV_LOG_ERROR, "Unsupported Bayer pattern: 0x%X\n",
1081  AV_RL32(s->pattern));
1082  return AVERROR_PATCHWELCOME;
1083  }
1084  break;
1085  case 10101:
1086  case 10121:
1087  case 10141:
1088  case 10161:
1089  switch (AV_RL32(s->pattern)) {
1090  case 0x02010100:
1091  s->avctx->pix_fmt = AV_PIX_FMT_BAYER_RGGB16;
1092  break;
1093  case 0x00010102:
1094  s->avctx->pix_fmt = AV_PIX_FMT_BAYER_BGGR16;
1095  break;
1096  case 0x01000201:
1097  s->avctx->pix_fmt = AV_PIX_FMT_BAYER_GBRG16;
1098  break;
1099  case 0x01020001:
1100  s->avctx->pix_fmt = AV_PIX_FMT_BAYER_GRBG16;
1101  break;
1102  default:
1103  av_log(s->avctx, AV_LOG_ERROR, "Unsupported Bayer pattern: 0x%X\n",
1104  AV_RL32(s->pattern));
1105  return AVERROR_PATCHWELCOME;
1106  }
1107  break;
1108  case 243:
1109  if (s->photometric == TIFF_PHOTOMETRIC_YCBCR) {
1110  if (s->subsampling[0] == 1 && s->subsampling[1] == 1) {
1111  s->avctx->pix_fmt = AV_PIX_FMT_YUV444P;
1112  } else if (s->subsampling[0] == 2 && s->subsampling[1] == 1) {
1113  s->avctx->pix_fmt = AV_PIX_FMT_YUV422P;
1114  } else if (s->subsampling[0] == 4 && s->subsampling[1] == 1) {
1115  s->avctx->pix_fmt = AV_PIX_FMT_YUV411P;
1116  } else if (s->subsampling[0] == 1 && s->subsampling[1] == 2) {
1117  s->avctx->pix_fmt = AV_PIX_FMT_YUV440P;
1118  } else if (s->subsampling[0] == 2 && s->subsampling[1] == 2) {
1119  s->avctx->pix_fmt = AV_PIX_FMT_YUV420P;
1120  } else if (s->subsampling[0] == 4 && s->subsampling[1] == 4) {
1121  s->avctx->pix_fmt = AV_PIX_FMT_YUV410P;
1122  } else {
1123  av_log(s->avctx, AV_LOG_ERROR, "Unsupported YCbCr subsampling\n");
1124  return AVERROR_PATCHWELCOME;
1125  }
1126  } else
1127  s->avctx->pix_fmt = AV_PIX_FMT_RGB24;
1128  break;
1129  case 161:
1130  s->avctx->pix_fmt = s->le ? AV_PIX_FMT_GRAY16LE : AV_PIX_FMT_GRAY16BE;
1131  break;
1132  case 162:
1133  s->avctx->pix_fmt = AV_PIX_FMT_YA8;
1134  break;
1135  case 322:
1136  s->avctx->pix_fmt = s->le ? AV_PIX_FMT_YA16LE : AV_PIX_FMT_YA16BE;
1137  break;
1138  case 324:
1139  s->avctx->pix_fmt = s->photometric == TIFF_PHOTOMETRIC_SEPARATED ? AV_PIX_FMT_RGB0 : AV_PIX_FMT_RGBA;
1140  break;
1141  case 405:
1142  if (s->photometric == TIFF_PHOTOMETRIC_SEPARATED)
1143  s->avctx->pix_fmt = AV_PIX_FMT_RGBA;
1144  else {
1145  av_log(s->avctx, AV_LOG_ERROR,
1146  "bpp=40 without PHOTOMETRIC_SEPARATED is unsupported\n");
1147  return AVERROR_PATCHWELCOME;
1148  }
1149  break;
1150  case 483:
1151  s->avctx->pix_fmt = s->le ? AV_PIX_FMT_RGB48LE : AV_PIX_FMT_RGB48BE;
1152  break;
1153  case 644:
1154  s->avctx->pix_fmt = s->le ? AV_PIX_FMT_RGBA64LE : AV_PIX_FMT_RGBA64BE;
1155  break;
1156  case 1243:
1157  s->avctx->pix_fmt = AV_PIX_FMT_GBRP;
1158  break;
1159  case 1324:
1160  s->avctx->pix_fmt = AV_PIX_FMT_GBRAP;
1161  break;
1162  case 1483:
1163  s->avctx->pix_fmt = s->le ? AV_PIX_FMT_GBRP16LE : AV_PIX_FMT_GBRP16BE;
1164  break;
1165  case 1644:
1166  s->avctx->pix_fmt = s->le ? AV_PIX_FMT_GBRAP16LE : AV_PIX_FMT_GBRAP16BE;
1167  break;
1168  default:
1169  av_log(s->avctx, AV_LOG_ERROR,
1170  "This format is not supported (bpp=%d, bppcount=%d)\n",
1171  s->bpp, s->bppcount);
1172  return AVERROR_INVALIDDATA;
1173  }
1174 
1175  if (s->photometric == TIFF_PHOTOMETRIC_YCBCR) {
1176  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(s->avctx->pix_fmt);
1177  if((desc->flags & AV_PIX_FMT_FLAG_RGB) ||
1178  !(desc->flags & AV_PIX_FMT_FLAG_PLANAR) ||
1179  desc->nb_components < 3) {
1180  av_log(s->avctx, AV_LOG_ERROR, "Unsupported YCbCr variant\n");
1181  return AVERROR_INVALIDDATA;
1182  }
1183  }
1184 
1185  if (s->width != s->avctx->width || s->height != s->avctx->height) {
1186  ret = ff_set_dimensions(s->avctx, s->width, s->height);
1187  if (ret < 0)
1188  return ret;
1189  }
1190  if ((ret = ff_thread_get_buffer(s->avctx, frame, 0)) < 0)
1191  return ret;
1192  if (s->avctx->pix_fmt == AV_PIX_FMT_PAL8) {
1193  if (!create_gray_palette)
1194  memcpy(frame->f->data[1], s->palette, sizeof(s->palette));
1195  else {
1196  /* make default grayscale pal */
1197  int i;
1198  uint32_t *pal = (uint32_t *)frame->f->data[1];
1199  for (i = 0; i < 1<<s->bpp; i++)
1200  pal[i] = 0xFFU << 24 | i * 255 / ((1<<s->bpp) - 1) * 0x010101;
1201  }
1202  }
1203  return 0;
1204 }
1205 
1206 static void set_sar(TiffContext *s, unsigned tag, unsigned num, unsigned den)
1207 {
1208  int offset = tag == TIFF_YRES ? 2 : 0;
1209  s->res[offset++] = num;
1210  s->res[offset] = den;
1211  if (s->res[0] && s->res[1] && s->res[2] && s->res[3]) {
1212  uint64_t num = s->res[2] * (uint64_t)s->res[1];
1213  uint64_t den = s->res[0] * (uint64_t)s->res[3];
1214  if (num > INT64_MAX || den > INT64_MAX) {
1215  num = num >> 1;
1216  den = den >> 1;
1217  }
1218  av_reduce(&s->avctx->sample_aspect_ratio.num, &s->avctx->sample_aspect_ratio.den,
1219  num, den, INT32_MAX);
1220  if (!s->avctx->sample_aspect_ratio.den)
1221  s->avctx->sample_aspect_ratio = (AVRational) {0, 1};
1222  }
1223 }
1224 
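/* Parse one IFD entry and update the decoder state (dimensions, compression,
 * strip/tile layout, palette, GeoTIFF and DNG metadata, ...). */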
1225 static int tiff_decode_tag(TiffContext *s, AVFrame *frame)
1226 {
1227  AVFrameSideData *sd;
1228  GetByteContext gb_temp;
1229  unsigned tag, type, count, off, value = 0, value2 = 1; // value2 is a denominator so init. to 1
1230  int i, start;
1231  int pos;
1232  int ret;
1233  double *dp;
1234 
1235  ret = ff_tread_tag(&s->gb, s->le, &tag, &type, &count, &start);
1236  if (ret < 0) {
1237  goto end;
1238  }
1239  if (tag <= s->last_tag)
1240  return AVERROR_INVALIDDATA;
1241 
1242  // We ignore TIFF_STRIP_SIZE here, as some files store it out of order relative to TIFF_STRIP_OFFS
1243  if (tag != TIFF_STRIP_SIZE)
1244  s->last_tag = tag;
1245 
1246  off = bytestream2_tell(&s->gb);
1247  if (count == 1) {
1248  switch (type) {
1249  case TIFF_BYTE:
1250  case TIFF_SHORT:
1251  case TIFF_LONG:
1252  value = ff_tget(&s->gb, type, s->le);
1253  break;
1254  case TIFF_RATIONAL:
1255  value = ff_tget(&s->gb, TIFF_LONG, s->le);
1256  value2 = ff_tget(&s->gb, TIFF_LONG, s->le);
1257  if (!value2) {
1258  av_log(s->avctx, AV_LOG_ERROR, "Invalid denominator in rational\n");
1259  return AVERROR_INVALIDDATA;
1260  }
1261 
1262  break;
1263  case TIFF_STRING:
1264  if (count <= 4) {
1265  break;
1266  }
1267  default:
1268  value = UINT_MAX;
1269  }
1270  }
1271 
1272  switch (tag) {
1273  case TIFF_SUBFILE:
1274  s->is_thumbnail = (value != 0);
1275  break;
1276  case TIFF_WIDTH:
1277  s->width = value;
1278  break;
1279  case TIFF_HEIGHT:
1280  s->height = value;
1281  break;
1282  case TIFF_BPP:
1283  if (count > 5 || count <= 0) {
1284  av_log(s->avctx, AV_LOG_ERROR,
1285  "This format is not supported (bpp=%d, %d components)\n",
1286  value, count);
1287  return AVERROR_INVALIDDATA;
1288  }
1289  s->bppcount = count;
1290  if (count == 1)
1291  s->bpp = value;
1292  else {
1293  switch (type) {
1294  case TIFF_BYTE:
1295  case TIFF_SHORT:
1296  case TIFF_LONG:
1297  s->bpp = 0;
1298  if (bytestream2_get_bytes_left(&s->gb) < type_sizes[type] * count)
1299  return AVERROR_INVALIDDATA;
1300  for (i = 0; i < count; i++)
1301  s->bpp += ff_tget(&s->gb, type, s->le);
1302  break;
1303  default:
1304  s->bpp = -1;
1305  }
1306  }
1307  break;
1308  case TIFF_SAMPLES_PER_PIXEL:
1309  if (count != 1) {
1310  av_log(s->avctx, AV_LOG_ERROR,
1311  "Samples per pixel requires a single value, many provided\n");
1312  return AVERROR_INVALIDDATA;
1313  }
1314  if (value > 5 || value <= 0) {
1315  av_log(s->avctx, AV_LOG_ERROR,
1316  "Invalid samples per pixel %d\n", value);
1317  return AVERROR_INVALIDDATA;
1318  }
1319  if (s->bppcount == 1)
1320  s->bpp *= value;
1321  s->bppcount = value;
1322  break;
1323  case TIFF_COMPR:
1324  s->compr = value;
1325  av_log(s->avctx, AV_LOG_DEBUG, "compression: %d\n", s->compr);
1326  s->predictor = 0;
1327  switch (s->compr) {
1328  case TIFF_RAW:
1329  case TIFF_PACKBITS:
1330  case TIFF_LZW:
1331  case TIFF_CCITT_RLE:
1332  break;
1333  case TIFF_G3:
1334  case TIFF_G4:
1335  s->fax_opts = 0;
1336  break;
1337  case TIFF_DEFLATE:
1338  case TIFF_ADOBE_DEFLATE:
1339 #if CONFIG_ZLIB
1340  break;
1341 #else
1342  av_log(s->avctx, AV_LOG_ERROR, "Deflate: ZLib not compiled in\n");
1343  return AVERROR(ENOSYS);
1344 #endif
1345  case TIFF_JPEG:
1346  case TIFF_NEWJPEG:
1347  s->is_jpeg = 1;
1348  break;
1349  case TIFF_LZMA:
1350 #if CONFIG_LZMA
1351  break;
1352 #else
1353  av_log(s->avctx, AV_LOG_ERROR, "LZMA not compiled in\n");
1354  return AVERROR(ENOSYS);
1355 #endif
1356  default:
1357  av_log(s->avctx, AV_LOG_ERROR, "Unknown compression method %i\n",
1358  s->compr);
1359  return AVERROR_INVALIDDATA;
1360  }
1361  break;
1362  case TIFF_ROWSPERSTRIP:
1363  if (!value || (type == TIFF_LONG && value == UINT_MAX))
1364  value = s->height;
1365  s->rps = FFMIN(value, s->height);
1366  break;
1367  case TIFF_STRIP_OFFS:
1368  if (count == 1) {
1369  if (value > INT_MAX) {
1370  av_log(s->avctx, AV_LOG_ERROR,
1371  "strippos %u too large\n", value);
1372  return AVERROR_INVALIDDATA;
1373  }
1374  s->strippos = 0;
1375  s->stripoff = value;
1376  } else
1377  s->strippos = off;
1378  s->strips = count;
1379  if (s->strips == 1)
1380  s->rps = s->height;
1381  s->sot = type;
1382  break;
1383  case TIFF_STRIP_SIZE:
1384  if (count == 1) {
1385  if (value > INT_MAX) {
1386  av_log(s->avctx, AV_LOG_ERROR,
1387  "stripsize %u too large\n", value);
1388  return AVERROR_INVALIDDATA;
1389  }
1390  s->stripsizesoff = 0;
1391  s->stripsize = value;
1392  s->strips = 1;
1393  } else {
1394  s->stripsizesoff = off;
1395  }
1396  s->strips = count;
1397  s->sstype = type;
1398  break;
1399  case TIFF_XRES:
1400  case TIFF_YRES:
1401  set_sar(s, tag, value, value2);
1402  break;
1403  case TIFF_TILE_OFFSETS:
1404  s->tile_offsets_offset = off;
1405  s->tile_count = count;
1406  s->is_tiled = 1;
1407  break;
1408  case TIFF_TILE_BYTE_COUNTS:
1409  s->tile_byte_counts_offset = off;
1410  break;
1411  case TIFF_TILE_LENGTH:
1412  s->tile_length = value;
1413  break;
1414  case TIFF_TILE_WIDTH:
1415  s->tile_width = value;
1416  break;
1417  case TIFF_PREDICTOR:
1418  s->predictor = value;
1419  break;
1420  case TIFF_SUB_IFDS:
1421  if (count == 1)
1422  s->sub_ifd = value;
1423  else if (count > 1)
1424  s->sub_ifd = ff_tget(&s->gb, TIFF_LONG, s->le); /** Only get the first SubIFD */
1425  break;
1426  case DNG_LINEARIZATION_TABLE:
1427  if (count > FF_ARRAY_ELEMS(s->dng_lut))
1428  return AVERROR_INVALIDDATA;
1429  for (int i = 0; i < count; i++)
1430  s->dng_lut[i] = ff_tget(&s->gb, type, s->le);
1431  break;
1432  case DNG_BLACK_LEVEL:
1433  if (count > 1) { /* Use the first value in the pattern (assume they're all the same) */
1434  if (type == TIFF_RATIONAL) {
1435  value = ff_tget(&s->gb, TIFF_LONG, s->le);
1436  value2 = ff_tget(&s->gb, TIFF_LONG, s->le);
1437  if (!value2) {
1438  av_log(s->avctx, AV_LOG_ERROR, "Invalid black level denominator\n");
1439  return AVERROR_INVALIDDATA;
1440  }
1441 
1442  s->black_level = value / value2;
1443  } else
1444  s->black_level = ff_tget(&s->gb, type, s->le);
1445  av_log(s->avctx, AV_LOG_WARNING, "Assuming black level pattern values are identical\n");
1446  } else {
1447  s->black_level = value / value2;
1448  }
1449  break;
1450  case DNG_WHITE_LEVEL:
1451  s->white_level = value;
1452  break;
1453  case TIFF_CFA_PATTERN_DIM:
1454  if (count != 2 || (ff_tget(&s->gb, type, s->le) != 2 &&
1455  ff_tget(&s->gb, type, s->le) != 2)) {
1456  av_log(s->avctx, AV_LOG_ERROR, "CFA Pattern dimensions are not 2x2\n");
1457  return AVERROR_INVALIDDATA;
1458  }
1459  break;
1460  case TIFF_CFA_PATTERN:
1461  s->is_bayer = 1;
1462  s->pattern[0] = ff_tget(&s->gb, type, s->le);
1463  s->pattern[1] = ff_tget(&s->gb, type, s->le);
1464  s->pattern[2] = ff_tget(&s->gb, type, s->le);
1465  s->pattern[3] = ff_tget(&s->gb, type, s->le);
1466  break;
1467  case TIFF_PHOTOMETRIC:
1468  switch (value) {
1469  case TIFF_PHOTOMETRIC_WHITE_IS_ZERO:
1470  case TIFF_PHOTOMETRIC_BLACK_IS_ZERO:
1471  case TIFF_PHOTOMETRIC_RGB:
1472  case TIFF_PHOTOMETRIC_PALETTE:
1473  case TIFF_PHOTOMETRIC_SEPARATED:
1474  case TIFF_PHOTOMETRIC_YCBCR:
1475  case TIFF_PHOTOMETRIC_CFA:
1476  case TIFF_PHOTOMETRIC_LINEAR_RAW: // Used by DNG images
1477  s->photometric = value;
1478  break;
1479  case TIFF_PHOTOMETRIC_ALPHA_MASK:
1480  case TIFF_PHOTOMETRIC_CIE_LAB:
1481  case TIFF_PHOTOMETRIC_ICC_LAB:
1482  case TIFF_PHOTOMETRIC_ITU_LAB:
1483  case TIFF_PHOTOMETRIC_LOG_L:
1484  case TIFF_PHOTOMETRIC_LOG_LUV:
1485  avpriv_report_missing_feature(s->avctx,
1486  "PhotometricInterpretation 0x%04X",
1487  value);
1488  return AVERROR_PATCHWELCOME;
1489  default:
1490  av_log(s->avctx, AV_LOG_ERROR, "PhotometricInterpretation %u is "
1491  "unknown\n", value);
1492  return AVERROR_INVALIDDATA;
1493  }
1494  break;
1495  case TIFF_FILL_ORDER:
1496  if (value < 1 || value > 2) {
1497  av_log(s->avctx, AV_LOG_ERROR,
1498  "Unknown FillOrder value %d, trying default one\n", value);
1499  value = 1;
1500  }
1501  s->fill_order = value - 1;
1502  break;
1503  case TIFF_PAL: {
1504  GetByteContext pal_gb[3];
1505  off = type_sizes[type];
1506  if (count / 3 > 256 ||
1507  bytestream2_get_bytes_left(&s->gb) < count / 3 * off * 3)
1508  return AVERROR_INVALIDDATA;
1509 
1510  pal_gb[0] = pal_gb[1] = pal_gb[2] = s->gb;
1511  bytestream2_skip(&pal_gb[1], count / 3 * off);
1512  bytestream2_skip(&pal_gb[2], count / 3 * off * 2);
1513 
1514  off = (type_sizes[type] - 1) << 3;
1515  if (off > 31U) {
1516  av_log(s->avctx, AV_LOG_ERROR, "palette shift %d is out of range\n", off);
1517  return AVERROR_INVALIDDATA;
1518  }
1519 
1520  for (i = 0; i < count / 3; i++) {
1521  uint32_t p = 0xFF000000;
1522  p |= (ff_tget(&pal_gb[0], type, s->le) >> off) << 16;
1523  p |= (ff_tget(&pal_gb[1], type, s->le) >> off) << 8;
1524  p |= ff_tget(&pal_gb[2], type, s->le) >> off;
1525  s->palette[i] = p;
1526  }
1527  s->palette_is_set = 1;
1528  break;
1529  }
1530  case TIFF_PLANAR:
1531  s->planar = value == 2;
1532  break;
1533  case TIFF_YCBCR_SUBSAMPLING:
1534  if (count != 2) {
1535  av_log(s->avctx, AV_LOG_ERROR, "subsample count invalid\n");
1536  return AVERROR_INVALIDDATA;
1537  }
1538  for (i = 0; i < count; i++) {
1539  s->subsampling[i] = ff_tget(&s->gb, type, s->le);
1540  if (s->subsampling[i] <= 0) {
1541  av_log(s->avctx, AV_LOG_ERROR, "subsampling %d is invalid\n", s->subsampling[i]);
1542  s->subsampling[i] = 1;
1543  return AVERROR_INVALIDDATA;
1544  }
1545  }
1546  break;
1547  case TIFF_T4OPTIONS:
1548  if (s->compr == TIFF_G3)
1549  s->fax_opts = value;
1550  break;
1551  case TIFF_T6OPTIONS:
1552  if (s->compr == TIFF_G4)
1553  s->fax_opts = value;
1554  break;
1555 #define ADD_METADATA(count, name, sep)\
1556  if ((ret = add_metadata(count, type, name, sep, s, frame)) < 0) {\
1557  av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");\
1558  goto end;\
1559  }
1560  case TIFF_MODEL_PIXEL_SCALE:
1561  ADD_METADATA(count, "ModelPixelScaleTag", NULL);
1562  break;
1563  case TIFF_MODEL_TRANSFORMATION:
1564  ADD_METADATA(count, "ModelTransformationTag", NULL);
1565  break;
1566  case TIFF_MODEL_TIEPOINT:
1567  ADD_METADATA(count, "ModelTiepointTag", NULL);
1568  break;
1569  case TIFF_GEO_KEY_DIRECTORY:
1570  if (s->geotag_count) {
1571  avpriv_request_sample(s->avctx, "Multiple geo key directories");
1572  return AVERROR_INVALIDDATA;
1573  }
1574  ADD_METADATA(1, "GeoTIFF_Version", NULL);
1575  ADD_METADATA(2, "GeoTIFF_Key_Revision", ".");
1576  s->geotag_count = ff_tget_short(&s->gb, s->le);
1577  if (s->geotag_count > count / 4 - 1) {
1578  s->geotag_count = count / 4 - 1;
1579  av_log(s->avctx, AV_LOG_WARNING, "GeoTIFF key directory buffer shorter than specified\n");
1580  }
1581  if ( bytestream2_get_bytes_left(&s->gb) < s->geotag_count * sizeof(int16_t) * 4
1582  || s->geotag_count == 0) {
1583  s->geotag_count = 0;
1584  return -1;
1585  }
1586  s->geotags = av_mallocz_array(s->geotag_count, sizeof(TiffGeoTag));
1587  if (!s->geotags) {
1588  av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");
1589  s->geotag_count = 0;
1590  goto end;
1591  }
1592  for (i = 0; i < s->geotag_count; i++) {
1593  s->geotags[i].key = ff_tget_short(&s->gb, s->le);
1594  s->geotags[i].type = ff_tget_short(&s->gb, s->le);
1595  s->geotags[i].count = ff_tget_short(&s->gb, s->le);
1596 
1597  if (!s->geotags[i].type)
1598  s->geotags[i].val = get_geokey_val(s->geotags[i].key, ff_tget_short(&s->gb, s->le));
1599  else
1600  s->geotags[i].offset = ff_tget_short(&s->gb, s->le);
1601  }
1602  break;
1603  case TIFF_GEO_DOUBLE_PARAMS:
1604  if (count >= INT_MAX / sizeof(int64_t))
1605  return AVERROR_INVALIDDATA;
1606  if (bytestream2_get_bytes_left(&s->gb) < count * sizeof(int64_t))
1607  return AVERROR_INVALIDDATA;
1608  dp = av_malloc_array(count, sizeof(double));
1609  if (!dp) {
1610  av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");
1611  goto end;
1612  }
1613  for (i = 0; i < count; i++)
1614  dp[i] = ff_tget_double(&s->gb, s->le);
1615  for (i = 0; i < s->geotag_count; i++) {
1616  if (s->geotags[i].type == TIFF_GEO_DOUBLE_PARAMS) {
1617  if (s->geotags[i].count == 0
1618  || s->geotags[i].offset + s->geotags[i].count > count) {
1619  av_log(s->avctx, AV_LOG_WARNING, "Invalid GeoTIFF key %d\n", s->geotags[i].key);
1620  } else if (s->geotags[i].val) {
1621  av_log(s->avctx, AV_LOG_WARNING, "Duplicate GeoTIFF key %d\n", s->geotags[i].key);
1622  } else {
1623  char *ap = doubles2str(&dp[s->geotags[i].offset], s->geotags[i].count, ", ");
1624  if (!ap) {
1625  av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");
1626  av_freep(&dp);
1627  return AVERROR(ENOMEM);
1628  }
1629  s->geotags[i].val = ap;
1630  }
1631  }
1632  }
1633  av_freep(&dp);
1634  break;
1635  case TIFF_GEO_ASCII_PARAMS:
1636  pos = bytestream2_tell(&s->gb);
1637  for (i = 0; i < s->geotag_count; i++) {
1638  if (s->geotags[i].type == TIFF_GEO_ASCII_PARAMS) {
1639  if (s->geotags[i].count == 0
1640  || s->geotags[i].offset + s->geotags[i].count > count) {
1641  av_log(s->avctx, AV_LOG_WARNING, "Invalid GeoTIFF key %d\n", s->geotags[i].key);
1642  } else {
1643  char *ap;
1644 
1645  bytestream2_seek(&s->gb, pos + s->geotags[i].offset, SEEK_SET);
1646  if (bytestream2_get_bytes_left(&s->gb) < s->geotags[i].count)
1647  return AVERROR_INVALIDDATA;
1648  if (s->geotags[i].val)
1649  return AVERROR_INVALIDDATA;
1650  ap = av_malloc(s->geotags[i].count);
1651  if (!ap) {
1652  av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");
1653  return AVERROR(ENOMEM);
1654  }
1655  bytestream2_get_bufferu(&s->gb, ap, s->geotags[i].count);
1656  ap[s->geotags[i].count - 1] = '\0'; //replace the "|" delimiter with a 0 byte
1657  s->geotags[i].val = ap;
1658  }
1659  }
1660  }
1661  break;
1662  case TIFF_ICC_PROFILE:
1663  gb_temp = s->gb;
1664  bytestream2_seek(&gb_temp, off, SEEK_SET);
1665 
1666  if (bytestream2_get_bytes_left(&gb_temp) < count)
1667  return AVERROR_INVALIDDATA;
1668 
1669  sd = av_frame_new_side_data(frame, AV_FRAME_DATA_ICC_PROFILE, count);
1670  if (!sd)
1671  return AVERROR(ENOMEM);
1672 
1673  bytestream2_get_bufferu(&gb_temp, sd->data, count);
1674  break;
1675  case TIFF_ARTIST:
1676  ADD_METADATA(count, "artist", NULL);
1677  break;
1678  case TIFF_COPYRIGHT:
1679  ADD_METADATA(count, "copyright", NULL);
1680  break;
1681  case TIFF_DATE:
1682  ADD_METADATA(count, "date", NULL);
1683  break;
1684  case TIFF_DOCUMENT_NAME:
1685  ADD_METADATA(count, "document_name", NULL);
1686  break;
1687  case TIFF_HOST_COMPUTER:
1688  ADD_METADATA(count, "computer", NULL);
1689  break;
1690  case TIFF_IMAGE_DESCRIPTION:
1691  ADD_METADATA(count, "description", NULL);
1692  break;
1693  case TIFF_MAKE:
1694  ADD_METADATA(count, "make", NULL);
1695  break;
1696  case TIFF_MODEL:
1697  ADD_METADATA(count, "model", NULL);
1698  break;
1699  case TIFF_PAGE_NAME:
1700  ADD_METADATA(count, "page_name", NULL);
1701  break;
1702  case TIFF_PAGE_NUMBER:
1703  ADD_METADATA(count, "page_number", " / ");
1704  // need to seek back to re-read the page number
1705  bytestream2_seek(&s->gb, -count * sizeof(uint16_t), SEEK_CUR);
1706  // read the page number
1707  s->cur_page = ff_tget(&s->gb, TIFF_SHORT, s->le);
1708  // get back to where we were before the previous seek
1709  bytestream2_seek(&s->gb, count * sizeof(uint16_t) - sizeof(uint16_t), SEEK_CUR);
1710  break;
1711  case TIFF_SOFTWARE_NAME:
1712  ADD_METADATA(count, "software", NULL);
1713  break;
1714  case DNG_VERSION:
1715  if (count == 4) {
1716  unsigned int ver[4];
1717  ver[0] = ff_tget(&s->gb, type, s->le);
1718  ver[1] = ff_tget(&s->gb, type, s->le);
1719  ver[2] = ff_tget(&s->gb, type, s->le);
1720  ver[3] = ff_tget(&s->gb, type, s->le);
1721 
1722  av_log(s->avctx, AV_LOG_DEBUG, "DNG file, version %u.%u.%u.%u\n",
1723  ver[0], ver[1], ver[2], ver[3]);
1724 
1725  tiff_set_type(s, TIFF_TYPE_DNG);
1726  }
1727  break;
1728  case CINEMADNG_TIME_CODES:
1729  case CINEMADNG_FRAME_RATE:
1730  case CINEMADNG_T_STOP:
1731  case CINEMADNG_REEL_NAME:
1732  case CINEMADNG_CAMERA_LABEL:
1733  tiff_set_type(s, TIFF_TYPE_CINEMADNG);
1734  break;
1735  default:
1736  if (s->avctx->err_recognition & AV_EF_EXPLODE) {
1737  av_log(s->avctx, AV_LOG_ERROR,
1738  "Unknown or unsupported tag %d/0x%0X\n",
1739  tag, tag);
1740  return AVERROR_INVALIDDATA;
1741  }
1742  }
1743 end:
1744  if (s->bpp > 64U) {
1745  av_log(s->avctx, AV_LOG_ERROR,
1746  "This format is not supported (bpp=%d, %d components)\n",
1747  s->bpp, count);
1748  s->bpp = 0;
1749  return AVERROR_INVALIDDATA;
1750  }
1751  bytestream2_seek(&s->gb, start, SEEK_SET);
1752  return 0;
1753 }
1754 
1755 static int decode_frame(AVCodecContext *avctx,
1756  void *data, int *got_frame, AVPacket *avpkt)
1757 {
1758  TiffContext *const s = avctx->priv_data;
1759  AVFrame *const p = data;
1760  ThreadFrame frame = { .f = data };
1761  unsigned off, last_off;
1762  int le, ret, plane, planes;
1763  int i, j, entries, stride;
1764  unsigned soff, ssize;
1765  uint8_t *dst;
1766  GetByteContext stripsizes;
1767  GetByteContext stripdata;
1768  int retry_for_subifd, retry_for_page;
1769  int is_dng;
1770  int has_tile_bits, has_strip_bits;
1771 
1772  bytestream2_init(&s->gb, avpkt->data, avpkt->size);
1773 
1774  // parse image header
1775  if ((ret = ff_tdecode_header(&s->gb, &le, &off))) {
1776  av_log(avctx, AV_LOG_ERROR, "Invalid TIFF header\n");
1777  return ret;
1778  } else if (off >= UINT_MAX - 14 || avpkt->size < off + 14) {
1779  av_log(avctx, AV_LOG_ERROR, "IFD offset is greater than image size\n");
1780  return AVERROR_INVALIDDATA;
1781  }
1782  s->le = le;
1783  // TIFF_BPP is not a required tag and defaults to 1
1784 
1785  s->tiff_type = TIFF_TYPE_TIFF;
1786 again:
1787  s->is_thumbnail = 0;
1788  s->bppcount = s->bpp = 1;
1789  s->photometric = TIFF_PHOTOMETRIC_NONE;
1790  s->compr = TIFF_RAW;
1791  s->fill_order = 0;
1792  s->white_level = 0;
1793  s->is_bayer = 0;
1794  s->is_tiled = 0;
1795  s->is_jpeg = 0;
1796  s->cur_page = 0;
1797  s->last_tag = 0;
1798 
1799  for (i = 0; i < 65536; i++)
1800  s->dng_lut[i] = i;
1801 
1802  free_geotags(s);
1803 
1804  // Reset these offsets so we can tell if they were set this frame
1805  s->stripsizesoff = s->strippos = 0;
1806  /* parse image file directory */
1807  bytestream2_seek(&s->gb, off, SEEK_SET);
1808  entries = ff_tget_short(&s->gb, le);
1809  if (bytestream2_get_bytes_left(&s->gb) < entries * 12)
1810  return AVERROR_INVALIDDATA;
1811  for (i = 0; i < entries; i++) {
1812  if ((ret = tiff_decode_tag(s, p)) < 0)
1813  return ret;
1814  }
1815 
1816  if (s->get_thumbnail && !s->is_thumbnail) {
1817  av_log(avctx, AV_LOG_INFO, "No embedded thumbnail present\n");
1818  return AVERROR_EOF;
1819  }
1820 
1821  /** whether we should process this IFD's SubIFD */
1822  retry_for_subifd = s->sub_ifd && (s->get_subimage || (!s->get_thumbnail && s->is_thumbnail));
1823  /** whether we should process this multi-page IFD's next page */
1824  retry_for_page = s->get_page && s->cur_page + 1 < s->get_page; // get_page is 1-indexed
1825 
1826  last_off = off;
1827  if (retry_for_page) {
1828  // set offset to the next IFD
1829  off = ff_tget_long(&s->gb, le);
1830  } else if (retry_for_subifd) {
1831  // set offset to the SubIFD
1832  off = s->sub_ifd;
1833  }
1834 
1835  if (retry_for_subifd || retry_for_page) {
1836  if (!off) {
1837  av_log(avctx, AV_LOG_ERROR, "Requested entry not found\n");
1838  return AVERROR_INVALIDDATA;
1839  }
1840  if (off <= last_off) {
1841  avpriv_request_sample(s->avctx, "non increasing IFD offset");
1842  return AVERROR_INVALIDDATA;
1843  }
1844  if (off >= UINT_MAX - 14 || avpkt->size < off + 14) {
1845  av_log(avctx, AV_LOG_ERROR, "IFD offset is greater than image size\n");
1846  return AVERROR_INVALIDDATA;
1847  }
1848  s->sub_ifd = 0;
1849  goto again;
1850  }
1851 
1852  /* At this point we've decided on which (Sub)IFD to process */
1853 
1854  is_dng = (s->tiff_type == TIFF_TYPE_DNG || s->tiff_type == TIFF_TYPE_CINEMADNG);
1855 
1856  for (i = 0; i<s->geotag_count; i++) {
1857  const char *keyname = get_geokey_name(s->geotags[i].key);
1858  if (!keyname) {
1859  av_log(avctx, AV_LOG_WARNING, "Unknown or unsupported GeoTIFF key %d\n", s->geotags[i].key);
1860  continue;
1861  }
1862  if (get_geokey_type(s->geotags[i].key) != s->geotags[i].type) {
1863  av_log(avctx, AV_LOG_WARNING, "Type of GeoTIFF key %d is wrong\n", s->geotags[i].key);
1864  continue;
1865  }
1866  ret = av_dict_set(&p->metadata, keyname, s->geotags[i].val, 0);
1867  if (ret<0) {
1868  av_log(avctx, AV_LOG_ERROR, "Writing metadata with key '%s' failed\n", keyname);
1869  return ret;
1870  }
1871  }
1872 
1873  if (is_dng) {
1874  int bps;
1875 
1876  if (s->bpp % s->bppcount)
1877  return AVERROR_INVALIDDATA;
1878  bps = s->bpp / s->bppcount;
1879  if (bps < 8 || bps > 32)
1880  return AVERROR_INVALIDDATA;
1881 
1882  if (s->white_level == 0)
1883  s->white_level = (1LL << bps) - 1; /* Default value as per the spec */
1884 
1885  if (s->white_level <= s->black_level) {
1886  av_log(avctx, AV_LOG_ERROR, "BlackLevel (%"PRId32") must be less than WhiteLevel (%"PRId32")\n",
1887  s->black_level, s->white_level);
1888  return AVERROR_INVALIDDATA;
1889  }
1890 
1891  if (s->planar)
1892  return AVERROR_PATCHWELCOME;
1893  }
1894 
1895  if (!s->is_tiled && !s->strippos && !s->stripoff) {
1896  av_log(avctx, AV_LOG_ERROR, "Image data is missing\n");
1897  return AVERROR_INVALIDDATA;
1898  }
1899 
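 /* An IFD should describe its image data either as tiles or as strips, never
  * both; tiled DNGs that also carry strip-related tags are tolerated with a warning. */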
1900  has_tile_bits = s->is_tiled || s->tile_byte_counts_offset || s->tile_offsets_offset || s->tile_width || s->tile_length || s->tile_count;
1901  has_strip_bits = s->strippos || s->strips || s->stripoff || s->rps || s->sot || s->sstype || s->stripsize || s->stripsizesoff;
1902 
1903  if (has_tile_bits && has_strip_bits) {
1904  int tiled_dng = s->is_tiled && is_dng;
1905  av_log(avctx, tiled_dng ? AV_LOG_WARNING : AV_LOG_ERROR, "Tiled TIFF is not allowed to strip\n");
1906  if (!tiled_dng)
1907  return AVERROR_INVALIDDATA;
1908  }
1909 
1910  /* now we have the data and may start decoding */
1911  if ((ret = init_image(s, &frame)) < 0)
1912  return ret;
1913 
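 /* Strip bookkeeping: a single strip with no recorded size is assumed to run to
  * the end of the packet; otherwise per-strip sizes and offsets are read from the
  * in-packet arrays (set up from the StripByteCounts/StripOffsets tags earlier). */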
1914  if (!s->is_tiled || has_strip_bits) {
1915  if (s->strips == 1 && !s->stripsize) {
1916  av_log(avctx, AV_LOG_WARNING, "Image data size missing\n");
1917  s->stripsize = avpkt->size - s->stripoff;
1918  }
1919 
1920  if (s->stripsizesoff) {
1921  if (s->stripsizesoff >= (unsigned)avpkt->size)
1922  return AVERROR_INVALIDDATA;
1923  bytestream2_init(&stripsizes, avpkt->data + s->stripsizesoff,
1924  avpkt->size - s->stripsizesoff);
1925  }
1926  if (s->strippos) {
1927  if (s->strippos >= (unsigned)avpkt->size)
1928  return AVERROR_INVALIDDATA;
1929  bytestream2_init(&stripdata, avpkt->data + s->strippos,
1930  avpkt->size - s->strippos);
1931  }
1932 
1933  if (s->rps <= 0 || s->rps % s->subsampling[1]) {
1934  av_log(avctx, AV_LOG_ERROR, "rps %d invalid\n", s->rps);
1935  return AVERROR_INVALIDDATA;
1936  }
1937  }
1938 
1939  if (s->photometric == TIFF_PHOTOMETRIC_LINEAR_RAW ||
1940  s->photometric == TIFF_PHOTOMETRIC_CFA) {
1941  p->color_trc = AVCOL_TRC_LINEAR;
1942  } else if (s->photometric == TIFF_PHOTOMETRIC_BLACK_IS_ZERO) {
1943  p->color_trc = AVCOL_TRC_GAMMA22;
1944  }
1945 
1946  /* Handle DNG images with JPEG-compressed tiles */
1947 
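 /* Only JPEG-compressed Bayer (CFA) tiles are supported here; they appear to be
  * handed to the wrapped MJPEG decoder via dng_decode_tiles(). Other tile
  * layouts bail out with "patch welcome" errors. */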
1948  if (is_dng && s->is_tiled) {
1949  if (!s->is_jpeg) {
1950  avpriv_report_missing_feature(avctx, "DNG uncompressed tiled images");
1951  return AVERROR_PATCHWELCOME;
1952  } else if (!s->is_bayer) {
1953  avpriv_report_missing_feature(avctx, "DNG JPG-compressed tiled non-bayer-encoded images");
1954  return AVERROR_PATCHWELCOME;
1955  } else {
1956  if ((ret = dng_decode_tiles(avctx, (AVFrame*)data, avpkt)) > 0)
1957  *got_frame = 1;
1958  return ret;
1959  }
1960  }
1961 
1962  /* Handle TIFF images and DNG images with uncompressed strips (non-tiled) */
1963 
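 /* Strip-based path: decode plane by plane (a single pass for interleaved data)
  * and strip by strip, each strip covering RowsPerStrip (s->rps) image rows. */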
1964  planes = s->planar ? s->bppcount : 1;
1965  for (plane = 0; plane < planes; plane++) {
1966  uint8_t *five_planes = NULL;
1967  int remaining = avpkt->size;
1968  int decoded_height;
1969  stride = p->linesize[plane];
1970  dst = p->data[plane];
1971  if (s->photometric == TIFF_PHOTOMETRIC_SEPARATED &&
1972  s->avctx->pix_fmt == AV_PIX_FMT_RGBA) {
1973  stride = stride * 5 / 4;
1974  five_planes =
1975  dst = av_malloc(stride * s->height);
1976  if (!dst)
1977  return AVERROR(ENOMEM);
1978  }
1979  for (i = 0; i < s->height; i += s->rps) {
1980  if (i)
1981  dst += s->rps * stride;
1982  if (s->stripsizesoff)
1983  ssize = ff_tget(&stripsizes, s->sstype, le);
1984  else
1985  ssize = s->stripsize;
1986 
1987  if (s->strippos)
1988  soff = ff_tget(&stripdata, s->sot, le);
1989  else
1990  soff = s->stripoff;
1991 
1992  if (soff > avpkt->size || ssize > avpkt->size - soff || ssize > remaining) {
1993  av_log(avctx, AV_LOG_ERROR, "Invalid strip size/offset\n");
1994  av_freep(&five_planes);
1995  return AVERROR_INVALIDDATA;
1996  }
1997  remaining -= ssize;
1998  if ((ret = tiff_unpack_strip(s, p, dst, stride, avpkt->data + soff, ssize, i,
1999  FFMIN(s->rps, s->height - i))) < 0) {
2000  if (avctx->err_recognition & AV_EF_EXPLODE) {
2001  av_freep(&five_planes);
2002  return ret;
2003  }
2004  break;
2005  }
2006  }
2007  decoded_height = FFMIN(i, s->height);
2008 
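 /* Undo TIFF predictor 2 (horizontal differencing): each sample was stored as
  * the difference from its left neighbour, so re-accumulate across every row,
  * using 16-bit wide adds for the 16-bit-per-component pixel formats. */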
2009  if (s->predictor == 2) {
2010  if (s->photometric == TIFF_PHOTOMETRIC_YCBCR) {
2011  av_log(s->avctx, AV_LOG_ERROR, "predictor == 2 with YUV is unsupported");
2012  return AVERROR_PATCHWELCOME;
2013  }
2014  dst = five_planes ? five_planes : p->data[plane];
2015  soff = s->bpp >> 3;
2016  if (s->planar)
2017  soff = FFMAX(soff / s->bppcount, 1);
2018  ssize = s->width * soff;
2019  if (s->avctx->pix_fmt == AV_PIX_FMT_RGB48LE ||
2020  s->avctx->pix_fmt == AV_PIX_FMT_RGBA64LE ||
2021  s->avctx->pix_fmt == AV_PIX_FMT_GRAY16LE ||
2022  s->avctx->pix_fmt == AV_PIX_FMT_YA16LE ||
2023  s->avctx->pix_fmt == AV_PIX_FMT_GBRP16LE ||
2024  s->avctx->pix_fmt == AV_PIX_FMT_GBRAP16LE) {
2025  for (i = 0; i < decoded_height; i++) {
2026  for (j = soff; j < ssize; j += 2)
2027  AV_WL16(dst + j, AV_RL16(dst + j) + AV_RL16(dst + j - soff));
2028  dst += stride;
2029  }
2030  } else if (s->avctx->pix_fmt == AV_PIX_FMT_RGB48BE ||
2031  s->avctx->pix_fmt == AV_PIX_FMT_RGBA64BE ||
2032  s->avctx->pix_fmt == AV_PIX_FMT_GRAY16BE ||
2033  s->avctx->pix_fmt == AV_PIX_FMT_YA16BE ||
2034  s->avctx->pix_fmt == AV_PIX_FMT_GBRP16BE ||
2035  s->avctx->pix_fmt == AV_PIX_FMT_GBRAP16BE) {
2036  for (i = 0; i < decoded_height; i++) {
2037  for (j = soff; j < ssize; j += 2)
2038  AV_WB16(dst + j, AV_RB16(dst + j) + AV_RB16(dst + j - soff));
2039  dst += stride;
2040  }
2041  } else {
2042  for (i = 0; i < decoded_height; i++) {
2043  for (j = soff; j < ssize; j++)
2044  dst[j] += dst[j - soff];
2045  dst += stride;
2046  }
2047  }
2048  }
2049 
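 /* WhiteIsZero: samples are stored inverted, so map each one to
  * (max value - sample) to obtain BlackIsZero-style data. */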
2050  if (s->photometric == TIFF_PHOTOMETRIC_WHITE_IS_ZERO) {
2051  int c = (s->avctx->pix_fmt == AV_PIX_FMT_PAL8 ? (1<<s->bpp) - 1 : 255);
2052  dst = p->data[plane];
2053  for (i = 0; i < s->height; i++) {
2054  for (j = 0; j < stride; j++)
2055  dst[j] = c - dst[j];
2056  dst += stride;
2057  }
2058  }
2059 
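 /* Separated (CMYK) data: convert to RGB(A) in place. Per channel this computes
  * (255 - C) * (255 - K) and scales the product back to 8 bits with the
  * x * 257 >> 16 trick; the 16-bit branch below uses x * 65537 >> 32. */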
2060  if (s->photometric == TIFF_PHOTOMETRIC_SEPARATED &&
2061  (s->avctx->pix_fmt == AV_PIX_FMT_RGB0 || s->avctx->pix_fmt == AV_PIX_FMT_RGBA)) {
2062  int x = s->avctx->pix_fmt == AV_PIX_FMT_RGB0 ? 4 : 5;
2063  uint8_t *src = five_planes ? five_planes : p->data[plane];
2064  dst = p->data[plane];
2065  for (i = 0; i < s->height; i++) {
2066  for (j = 0; j < s->width; j++) {
2067  int k = 255 - src[x * j + 3];
2068  int r = (255 - src[x * j ]) * k;
2069  int g = (255 - src[x * j + 1]) * k;
2070  int b = (255 - src[x * j + 2]) * k;
2071  dst[4 * j ] = r * 257 >> 16;
2072  dst[4 * j + 1] = g * 257 >> 16;
2073  dst[4 * j + 2] = b * 257 >> 16;
2074  dst[4 * j + 3] = s->avctx->pix_fmt == AV_PIX_FMT_RGBA ? src[x * j + 4] : 255;
2075  }
2076  src += stride;
2077  dst += p->linesize[plane];
2078  }
2079  av_freep(&five_planes);
2080  } else if (s->photometric == TIFF_PHOTOMETRIC_SEPARATED &&
2081  s->avctx->pix_fmt == AV_PIX_FMT_RGBA64BE) {
2082  dst = p->data[plane];
2083  for (i = 0; i < s->height; i++) {
2084  for (j = 0; j < s->width; j++) {
2085  uint64_t k = 65535 - AV_RB16(dst + 8 * j + 6);
2086  uint64_t r = (65535 - AV_RB16(dst + 8 * j )) * k;
2087  uint64_t g = (65535 - AV_RB16(dst + 8 * j + 2)) * k;
2088  uint64_t b = (65535 - AV_RB16(dst + 8 * j + 4)) * k;
2089  AV_WB16(dst + 8 * j , r * 65537 >> 32);
2090  AV_WB16(dst + 8 * j + 2, g * 65537 >> 32);
2091  AV_WB16(dst + 8 * j + 4, b * 65537 >> 32);
2092  AV_WB16(dst + 8 * j + 6, 65535);
2093  }
2094  dst += p->linesize[plane];
2095  }
2096  }
2097  }
2098 
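 /* Planar RGB was decoded into data[0..2] in R,G,B order; rotate the plane
  * pointers (and linesizes) into the G,B,R order used by the GBR(A)P formats. */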
2099  if (s->planar && s->bppcount > 2) {
2100  FFSWAP(uint8_t*, p->data[0], p->data[2]);
2101  FFSWAP(int, p->linesize[0], p->linesize[2]);
2102  FFSWAP(uint8_t*, p->data[0], p->data[1]);
2103  FFSWAP(int, p->linesize[0], p->linesize[1]);
2104  }
2105 
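 /* 16-bit Bayer data in plain TIFF (not DNG): rescale samples so the signalled
  * white level maps to full-scale 65535. */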
2106  if (s->is_bayer && s->white_level && s->bpp == 16 && !is_dng) {
2107  uint16_t *dst = (uint16_t *)p->data[0];
2108  for (i = 0; i < s->height; i++) {
2109  for (j = 0; j < s->width; j++)
2110  dst[j] = FFMIN((dst[j] / (float)s->white_level) * 65535, 65535);
2111  dst += stride / 2;
2112  }
2113  }
2114 
2115  *got_frame = 1;
2116 
2117  return avpkt->size;
2118 }
2119 
2120 static av_cold int tiff_init(AVCodecContext *avctx)
2121 {
2122  TiffContext *s = avctx->priv_data;
2123  const AVCodec *codec;
2124  int ret;
2125 
2126  s->width = 0;
2127  s->height = 0;
2128  s->subsampling[0] =
2129  s->subsampling[1] = 1;
2130  s->avctx = avctx;
2131  ff_lzw_decode_open(&s->lzw);
2132  if (!s->lzw)
2133  return AVERROR(ENOMEM);
2134  ff_ccitt_unpack_init();
2135 
2136  /* Allocate JPEG frame */
2137  s->jpgframe = av_frame_alloc();
2138  s->jpkt = av_packet_alloc();
2139  if (!s->jpgframe || !s->jpkt)
2140  return AVERROR(ENOMEM);
2141 
2142  /* Prepare everything needed for JPEG decoding */
2143  codec = avcodec_find_decoder(AV_CODEC_ID_MJPEG);
2144  if (!codec)
2145  return AVERROR_BUG;
2146  s->avctx_mjpeg = avcodec_alloc_context3(codec);
2147  if (!s->avctx_mjpeg)
2148  return AVERROR(ENOMEM);
2149  s->avctx_mjpeg->flags = avctx->flags;
2150  s->avctx_mjpeg->flags2 = avctx->flags2;
2151  s->avctx_mjpeg->dct_algo = avctx->dct_algo;
2152  s->avctx_mjpeg->idct_algo = avctx->idct_algo;
2153  s->avctx_mjpeg->max_pixels = avctx->max_pixels;
2154  ret = avcodec_open2(s->avctx_mjpeg, codec, NULL);
2155  if (ret < 0) {
2156  return ret;
2157  }
2158 
2159  return 0;
2160 }
2161 
2162 static av_cold int tiff_end(AVCodecContext *avctx)
2163 {
2164  TiffContext *const s = avctx->priv_data;
2165 
2166  free_geotags(s);
2167 
2168  ff_lzw_decode_close(&s->lzw);
2169  av_freep(&s->deinvert_buf);
2170  s->deinvert_buf_size = 0;
2171  av_freep(&s->yuv_line);
2172  s->yuv_line_size = 0;
2173  av_freep(&s->fax_buffer);
2174  s->fax_buffer_size = 0;
2175  av_frame_free(&s->jpgframe);
2176  av_packet_free(&s->jpkt);
2177  avcodec_free_context(&s->avctx_mjpeg);
2178  return 0;
2179 }
2180 
2181 #define OFFSET(x) offsetof(TiffContext, x)
2182 static const AVOption tiff_options[] = {
2183  { "subimage", "decode subimage instead if available", OFFSET(get_subimage), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM },
2184  { "thumbnail", "decode embedded thumbnail subimage instead if available", OFFSET(get_thumbnail), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM },
2185  { "page", "page number of multi-page image to decode (starting from 1)", OFFSET(get_page), AV_OPT_TYPE_INT, {.i64=0}, 0, UINT16_MAX, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM },
2186  { NULL },
2187 };
2188 
2189 static const AVClass tiff_decoder_class = {
2190  .class_name = "TIFF decoder",
2191  .item_name = av_default_item_name,
2192  .option = tiff_options,
2193  .version = LIBAVUTIL_VERSION_INT,
2194 };
2195 
2196 AVCodec ff_tiff_decoder = {
2197  .name = "tiff",
2198  .long_name = NULL_IF_CONFIG_SMALL("TIFF image"),
2199  .type = AVMEDIA_TYPE_VIDEO,
2200  .id = AV_CODEC_ID_TIFF,
2201  .priv_data_size = sizeof(TiffContext),
2202  .init = tiff_init,
2203  .close = tiff_end,
2204  .decode = decode_frame,
2205  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
2206  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
2207  .priv_class = &tiff_decoder_class,
2208 };