sga.c
/*
 * Copyright (c) 2021 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/common.h"
#include "avcodec.h"
#include "get_bits.h"
#include "bytestream.h"
#include "codec_internal.h"
#include "decode.h"

#define PALDATA_FOLLOWS_TILEDATA 4
#define HAVE_COMPRESSED_TILEMAP 32
#define HAVE_TILEMAP 128

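/*
 * Frame flag bits as consumed by this decoder: PALDATA_FOLLOWS_TILEDATA
 * selects whether the palette block is stored after the tile data, and
 * HAVE_TILEMAP signals that a 16-bit-per-tile tilemap section (plus a
 * 16-bit tile count at offset 12) is present.  HAVE_COMPRESSED_TILEMAP
 * appears not to be consulted anywhere in this file.
 */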
typedef struct SGAVideoContext {
    GetByteContext gb;

    int metadata_size;
    int tiledata_size;
    int tiledata_offset;
    int tilemapdata_size;
    int tilemapdata_offset;
    int paldata_size;
    int paldata_offset;
    int palmapdata_offset;
    int palmapdata_size;

    int flags;
    int nb_pal;
    int nb_tiles;
    int tiles_w, tiles_h;
    int shift;
    int plus;
    int swap;

    uint32_t pal[256];
    uint8_t *tileindex_data;
    unsigned tileindex_size;
    uint8_t *palmapindex_data;
    unsigned palmapindex_size;
    uint8_t uncompressed[65536];
} SGAVideoContext;

static av_cold int sga_decode_init(AVCodecContext *avctx)
{
    avctx->pix_fmt = AV_PIX_FMT_PAL8;
    return 0;
}

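/*
 * Parse one 18-byte (144-bit) palette block into 16 opaque ARGB entries.
 * The source stores one 48-bit group per R, G and B channel; within a
 * group there are three 16-bit bit planes (least significant plane
 * first, palette entries in reverse order).  Each resulting 3-bit
 * component sits in the top bits of its byte and is replicated once
 * (>> 3) so the levels roughly span 0..255; alpha is forced to 0xFF.
 */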
static int decode_palette(GetByteContext *gb, uint32_t *pal)
{
    GetBitContext gbit;

    if (bytestream2_get_bytes_left(gb) < 18)
        return AVERROR_INVALIDDATA;

    memset(pal, 0, 16 * sizeof(*pal));
    init_get_bits8(&gbit, gb->buffer, 18);

    for (int RGBIndex = 0; RGBIndex < 3; RGBIndex++) {
        for (int index = 0; index < 16; index++) {
            unsigned color = get_bits1(&gbit) << RGBIndex;
            pal[15 - index] |= color << (5 + 16);
        }
    }

    for (int RGBIndex = 0; RGBIndex < 3; RGBIndex++) {
        for (int index = 0; index < 16; index++) {
            unsigned color = get_bits1(&gbit) << RGBIndex;
            pal[15 - index] |= color << (5 + 8);
        }
    }

    for (int RGBIndex = 0; RGBIndex < 3; RGBIndex++) {
        for (int index = 0; index < 16; index++) {
            unsigned color = get_bits1(&gbit) << RGBIndex;
            pal[15 - index] |= color << (5 + 0);
        }
    }

    for (int index = 0; index < 16; index++)
        pal[index] = (0xFFU << 24) | pal[index] | (pal[index] >> 3);

    bytestream2_skip(gb, 18);

    return 0;
}

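/*
 * Blit the 8x8 tiles in scan order, adding a per-tile palette bank
 * (palmapindex_data value * 16) to every 4-bit tile pixel.
 */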
static int decode_index_palmap(SGAVideoContext *s, AVFrame *frame)
{
    const uint8_t *tt = s->tileindex_data;

    for (int y = 0; y < s->tiles_h; y++) {
        for (int x = 0; x < s->tiles_w; x++) {
            int pal_idx = s->palmapindex_data[y * s->tiles_w + x] * 16;
            uint8_t *dst = frame->data[0] + y * 8 * frame->linesize[0] + x * 8;

            for (int yy = 0; yy < 8; yy++) {
                for (int xx = 0; xx < 8; xx++)
                    dst[xx] = pal_idx + tt[xx];
                tt += 8;

                dst += frame->linesize[0];
            }
        }
    }

    return 0;
}

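/*
 * Render the frame through the tilemap: each big-endian 16-bit entry
 * holds a 1-based tile index in bits 0-8, horizontal/vertical flip
 * flags in bits 11/12 and a 2-bit palette bank in bits 13-14.
 */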
static int decode_index_tilemap(SGAVideoContext *s, AVFrame *frame)
{
    GetByteContext *gb = &s->gb, gb2;

    bytestream2_seek(gb, s->tilemapdata_offset, SEEK_SET);
    if (bytestream2_get_bytes_left(gb) < s->tilemapdata_size)
        return AVERROR_INVALIDDATA;

    gb2 = *gb;

    for (int y = 0; y < s->tiles_h; y++) {
        for (int x = 0; x < s->tiles_w; x++) {
            uint8_t tile[64];
            int tilemap = bytestream2_get_be16u(&gb2);
            int flip_x = (tilemap >> 11) & 1;
            int flip_y = (tilemap >> 12) & 1;
            int tindex = av_clip((tilemap & 511) - 1, 0, s->nb_tiles - 1);
            const uint8_t *tt = s->tileindex_data + tindex * 64;
            int pal_idx = ((tilemap >> 13) & 3) * 16;
            uint8_t *dst = frame->data[0] + y * 8 * frame->linesize[0] + x * 8;

            if (!flip_x && !flip_y) {
                memcpy(tile, tt, 64);
            } else if (flip_x && flip_y) {
                for (int i = 0; i < 8; i++) {
                    for (int j = 0; j < 8; j++)
                        tile[i * 8 + j] = tt[(7 - i) * 8 + 7 - j];
                }
            } else if (flip_x) {
                for (int i = 0; i < 8; i++) {
                    for (int j = 0; j < 8; j++)
                        tile[i * 8 + j] = tt[i * 8 + 7 - j];
                }
            } else {
                for (int i = 0; i < 8; i++) {
                    for (int j = 0; j < 8; j++)
                        tile[i * 8 + j] = tt[(7 - i) * 8 + j];
                }
            }

            for (int yy = 0; yy < 8; yy++) {
                for (int xx = 0; xx < 8; xx++)
                    dst[xx] = pal_idx + tile[xx + yy * 8];

                dst += frame->linesize[0];
            }
        }
    }

    return 0;
}

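/*
 * Simplest path: no tilemap and no palette map, so the 4-bit tile
 * pixels are copied straight into the PAL8 frame, tile by tile.
 */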
static int decode_index(SGAVideoContext *s, AVFrame *frame)
{
    const uint8_t *src = s->tileindex_data;
    uint8_t *dst = frame->data[0];

    for (int y = 0; y < frame->height; y += 8) {
        for (int x = 0; x < frame->width; x += 8) {
            for (int yy = 0; yy < 8; yy++) {
                for (int xx = 0; xx < 8; xx++)
                    dst[x + xx + yy * frame->linesize[0]] = src[xx];
                src += 8;
            }
        }

        dst += 8 * frame->linesize[0];
    }

    return 0;
}

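/*
 * LZSS-style decompression.  A 16-bit header supplies one flag bit per
 * step: 0 copies two literal bytes, 1 reads a 16-bit (count, offset)
 * pair split at 'shift' bits and copies count * 2 back-referenced
 * bytes ('plus' biases the count).  An all-zero pair switches to
 * copying the remaining input verbatim and ends the stream.
 */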
static int lzss_decompress(AVCodecContext *avctx,
                           GetByteContext *gb, uint8_t *dst,
                           int dst_size, int shift, int plus)
{
    int oi = 0;

    while (bytestream2_get_bytes_left(gb) > 0 && oi < dst_size) {
        uint16_t displace, header = bytestream2_get_be16(gb);
        int count, offset;

        for (int i = 0; i < 16; i++) {
            switch (header >> 15) {
            case 0:
                if (oi + 2 < dst_size) {
                    dst[oi++] = bytestream2_get_byte(gb);
                    dst[oi++] = bytestream2_get_byte(gb);
                }
                break;
            case 1:
                displace = bytestream2_get_be16(gb);
                count = displace >> shift;
                offset = displace & ((1 << shift) - 1);

                if (displace == 0) {
                    while (bytestream2_get_bytes_left(gb) > 0 &&
                           oi < dst_size)
                        dst[oi++] = bytestream2_get_byte(gb);
                    return oi;
                }

                count += plus;

                if (offset <= 0)
                    offset = 1;
                if (oi < offset || oi + count * 2 > dst_size)
                    return AVERROR_INVALIDDATA;
                for (int j = 0; j < count * 2; j++) {
                    dst[oi] = dst[oi - offset];
                    oi++;
                }
                break;
            }

            header <<= 1;
        }
    }

    return AVERROR_INVALIDDATA;
}

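/*
 * Read the per-tile palette map as a tightly packed bitstream; the
 * field width depends on how many palettes the frame carries
 * ((nb_pal + 1) / 2 bits per tile).
 */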
static int decode_palmapdata(AVCodecContext *avctx)
{
    SGAVideoContext *s = avctx->priv_data;
    const int bits = (s->nb_pal + 1) / 2;
    GetByteContext *gb = &s->gb;
    GetBitContext pm;

    bytestream2_seek(gb, s->palmapdata_offset, SEEK_SET);
    if (bytestream2_get_bytes_left(gb) < s->palmapdata_size)
        return AVERROR_INVALIDDATA;
    init_get_bits8(&pm, gb->buffer, s->palmapdata_size);

    for (int y = 0; y < s->tiles_h; y++) {
        uint8_t *dst = s->palmapindex_data + y * s->tiles_w;

        for (int x = 0; x < s->tiles_w; x++)
            dst[x] = get_bits(&pm, bits);

        dst += s->tiles_w;
    }

    return 0;
}

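/*
 * Unpack the 4-bit tile pixels (32 packed bytes per 8x8 tile) into one
 * byte per pixel.  For the 'swap' bitstream variants, adjacent pixel
 * pairs on every other tile row are exchanged after unpacking.
 */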
static int decode_tiledata(AVCodecContext *avctx)
{
    SGAVideoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetBitContext tm;

    bytestream2_seek(gb, s->tiledata_offset, SEEK_SET);
    if (bytestream2_get_bytes_left(gb) < s->tiledata_size)
        return AVERROR_INVALIDDATA;
    init_get_bits8(&tm, gb->buffer, s->tiledata_size);

    for (int n = 0; n < s->nb_tiles; n++) {
        uint8_t *dst = s->tileindex_data + n * 64;

        for (int yy = 0; yy < 8; yy++) {
            for (int xx = 0; xx < 8; xx++)
                dst[xx] = get_bits(&tm, 4);

            dst += 8;
        }
    }

    for (int i = 0; i < s->nb_tiles && s->swap; i++) {
        uint8_t *dst = s->tileindex_data + i * 64;

        for (int j = 8; j < 64; j += 16) {
            for (int k = 0; k < 8; k += 2)
                FFSWAP(uint8_t, dst[j + k], dst[j+k+1]);
        }
    }

    return 0;
}

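/*
 * Packet layout as parsed below: byte 0 is the record type, byte 8
 * holds the flags, byte 9 the palette count, bytes 10/11 the tile grid
 * size in 8x8 tiles, and, when HAVE_TILEMAP is set, bytes 12/13 the
 * tile count.  Everything after this metadata is stored raw,
 * LZSS-compressed, or a per-section mix, depending on the type byte.
 */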
static int sga_decode_frame(AVCodecContext *avctx, AVFrame *frame,
                            int *got_frame, AVPacket *avpkt)
{
    SGAVideoContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int ret, type;

    if (avpkt->size <= 14)
        return AVERROR_INVALIDDATA;

    s->flags = avpkt->data[8];
    s->nb_pal = avpkt->data[9];
    s->tiles_w = avpkt->data[10];
    s->tiles_h = avpkt->data[11];

    if (s->nb_pal > 4)
        return AVERROR_INVALIDDATA;

    if ((ret = ff_set_dimensions(avctx,
                                 s->tiles_w * 8,
                                 s->tiles_h * 8)) < 0)
        return ret;

    av_fast_padded_malloc(&s->tileindex_data, &s->tileindex_size,
                          avctx->width * avctx->height);
    if (!s->tileindex_data)
        return AVERROR(ENOMEM);

    av_fast_padded_malloc(&s->palmapindex_data, &s->palmapindex_size,
                          s->tiles_w * s->tiles_h);
    if (!s->palmapindex_data)
        return AVERROR(ENOMEM);

    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)
        return ret;

    bytestream2_init(gb, avpkt->data, avpkt->size);

    type = bytestream2_get_byte(gb);
    s->metadata_size = 12 + ((!!(s->flags & HAVE_TILEMAP)) * 2);
    s->nb_tiles = s->flags & HAVE_TILEMAP ? AV_RB16(avpkt->data + 12) : s->tiles_w * s->tiles_h;
    if (s->nb_tiles > s->tiles_w * s->tiles_h)
        return AVERROR_INVALIDDATA;

    av_log(avctx, AV_LOG_DEBUG, "type: %X flags: %X nb_tiles: %d\n", type, s->flags, s->nb_tiles);

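    /* The type byte picks the coding variant: every case below uses the
     * LZSS scheme and differs only in how the 16-bit back-reference is
     * split ('shift'), in the copy-count bias ('plus') and in whether
     * decode_tiledata() must swap pixel pairs.  Type 0xC1 has no case
     * here and is treated as stored data further down. */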
    switch (type) {
    case 0xE7:
    case 0xCB:
    case 0xCD:
        s->swap = 1;
        s->shift = 12;
        s->plus = 1;
        break;
    case 0xC9:
        s->swap = 1;
        s->shift = 13;
        s->plus = 1;
        break;
    case 0xC8:
        s->swap = 1;
        s->shift = 13;
        s->plus = 0;
        break;
    case 0xC7:
        s->swap = 0;
        s->shift = 13;
        s->plus = 1;
        break;
    case 0xC6:
        s->swap = 0;
        s->shift = 13;
        s->plus = 0;
        break;
    }

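    /* Type 0xE7 carries three sections, each preceded by a big-endian
     * 16-bit size whose top bit marks the section as stored raw rather
     * than LZSS-compressed; the sections plus any trailing bytes are
     * gathered into the scratch buffer, which then replaces the packet
     * as the bytestream source. */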
    if (type == 0xE7) {
        int offset = s->metadata_size, left;
        int sizes[3];

        bytestream2_seek(gb, s->metadata_size, SEEK_SET);

        for (int i = 0; i < 3; i++)
            sizes[i] = bytestream2_get_be16(gb);

        for (int i = 0; i < 3; i++) {
            int size = sizes[i];
            int raw = size >> 15;

            size &= (1 << 15) - 1;

            if (raw) {
                if (bytestream2_get_bytes_left(gb) < size)
                    return AVERROR_INVALIDDATA;

                if (sizeof(s->uncompressed) - offset < size)
                    return AVERROR_INVALIDDATA;

                memcpy(s->uncompressed + offset, gb->buffer, size);
                bytestream2_skip(gb, size);
            } else {
                GetByteContext gb2;

                if (bytestream2_get_bytes_left(gb) < size)
                    return AVERROR_INVALIDDATA;

                bytestream2_init(&gb2, gb->buffer, size);
                ret = lzss_decompress(avctx, &gb2, s->uncompressed + offset,
                                      sizeof(s->uncompressed) - offset, s->shift, s->plus);
                if (ret < 0)
                    return ret;
                bytestream2_skip(gb, size);
                size = ret;
            }

            offset += size;
        }

        left = bytestream2_get_bytes_left(gb);
        if (sizeof(s->uncompressed) - offset < left)
            return AVERROR_INVALIDDATA;

        bytestream2_get_buffer(gb, s->uncompressed + offset, left);

        offset += left;
        bytestream2_init(gb, s->uncompressed, offset);
    }

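    /* For the single-section LZSS types the whole payload after the
     * metadata is decompressed into the scratch buffer first; all types
     * then share the same section-layout computation and per-section
     * decoding below. */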
    switch (type) {
    case 0xCD:
    case 0xCB:
    case 0xC9:
    case 0xC8:
    case 0xC7:
    case 0xC6:
        bytestream2_seek(gb, s->metadata_size, SEEK_SET);
        ret = lzss_decompress(avctx, gb, s->uncompressed + s->metadata_size,
                              sizeof(s->uncompressed) - s->metadata_size, s->shift, s->plus);
        if (ret < 0)
            return ret;
        bytestream2_init(gb, s->uncompressed, ret + s->metadata_size);
    case 0xE7:
    case 0xC1:
        s->tiledata_size = s->nb_tiles * 32;
        s->paldata_size = s->nb_pal * 18;
        s->tiledata_offset = s->flags & PALDATA_FOLLOWS_TILEDATA ? s->metadata_size : s->metadata_size + s->paldata_size;
        s->paldata_offset = s->flags & PALDATA_FOLLOWS_TILEDATA ? s->metadata_size + s->tiledata_size : s->metadata_size;
        s->palmapdata_offset = (s->flags & HAVE_TILEMAP) ? -1 : s->paldata_offset + s->paldata_size;
        s->palmapdata_size = (s->flags & HAVE_TILEMAP) || s->nb_pal < 2 ? 0 : (s->tiles_w * s->tiles_h * ((s->nb_pal + 1) / 2) + 7) / 8;
        s->tilemapdata_size = (s->flags & HAVE_TILEMAP) ? s->tiles_w * s->tiles_h * 2 : 0;
        s->tilemapdata_offset = (s->flags & HAVE_TILEMAP) ? s->paldata_offset + s->paldata_size: -1;

        bytestream2_seek(gb, s->paldata_offset, SEEK_SET);
        for (int n = 0; n < s->nb_pal; n++) {
            ret = decode_palette(gb, s->pal + 16 * n);
            if (ret < 0)
                return ret;
        }

        if (s->tiledata_size > 0) {
            ret = decode_tiledata(avctx);
            if (ret < 0)
                return ret;
        }

        if (s->palmapdata_size > 0) {
            ret = decode_palmapdata(avctx);
            if (ret < 0)
                return ret;
        }

        if (s->palmapdata_size > 0 && s->tiledata_size > 0) {
            ret = decode_index_palmap(s, frame);
            if (ret < 0)
                return ret;
        } else if (s->tilemapdata_size > 0 && s->tiledata_size > 0) {
            ret = decode_index_tilemap(s, frame);
            if (ret < 0)
                return ret;
        } else if (s->tiledata_size > 0) {
            ret = decode_index(s, frame);
            if (ret < 0)
                return ret;
        }
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown type: %X\n", type);
        return AVERROR_INVALIDDATA;
    }

    memcpy(frame->data[1], s->pal, AVPALETTE_SIZE);
#if FF_API_PALETTE_HAS_CHANGED
FF_DISABLE_DEPRECATION_WARNINGS
    frame->palette_has_changed = 1;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
    frame->pict_type = AV_PICTURE_TYPE_I;
    frame->flags |= AV_FRAME_FLAG_KEY;

    *got_frame = 1;

    return avpkt->size;
}

static av_cold int sga_decode_end(AVCodecContext *avctx)
{
    SGAVideoContext *s = avctx->priv_data;

    av_freep(&s->tileindex_data);
    s->tileindex_size = 0;

    av_freep(&s->palmapindex_data);
    s->palmapindex_size = 0;

    return 0;
}

const FFCodec ff_sga_decoder = {
    .p.name         = "sga",
    CODEC_LONG_NAME("Digital Pictures SGA Video"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_SGA_VIDEO,
    .priv_data_size = sizeof(SGAVideoContext),
    .init           = sga_decode_init,
    FF_CODEC_DECODE_CB(sga_decode_frame),
    .close          = sga_decode_end,
    .p.capabilities = AV_CODEC_CAP_DR1,
};