rasc.c
/*
 * RemotelyAnywhere Screen Capture decoder
 *
 * Copyright (c) 2018 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bytestream.h"
#include "internal.h"

#include <zlib.h>

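/* Chunk tags that appear in RASC packets. */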
#define KBND MKTAG('K', 'B', 'N', 'D')
#define FINT MKTAG('F', 'I', 'N', 'T')
#define INIT MKTAG('I', 'N', 'I', 'T')
#define BNDL MKTAG('B', 'N', 'D', 'L')
#define KFRM MKTAG('K', 'F', 'R', 'M')
#define DLTA MKTAG('D', 'L', 'T', 'A')
#define MOUS MKTAG('M', 'O', 'U', 'S')
#define MPOS MKTAG('M', 'P', 'O', 'S')
#define MOVE MKTAG('M', 'O', 'V', 'E')
#define EMPT MKTAG('E', 'M', 'P', 'T')

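/* Decoder context: two persistent reference frames (frame1, frame2), scratch
 * buffers for inflated data and the cursor bitmap, cursor geometry, and a
 * shared zlib inflate stream. */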
typedef struct RASCContext {
    AVClass        *class;
    int             skip_cursor;
    GetByteContext  gb;
    uint8_t        *delta;
    int             delta_size;
    uint8_t        *cursor;
    int             cursor_size;
    unsigned        cursor_w;
    unsigned        cursor_h;
    unsigned        cursor_x;
    unsigned        cursor_y;
    int             stride;
    int             bpp;
    z_stream        zstream;
    AVFrame        *frame;
    AVFrame        *frame1;
    AVFrame        *frame2;
} RASCContext;

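/* Zero the visible area of the frame's image plane. */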
static void clear_plane(AVCodecContext *avctx, AVFrame *frame)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *dst = frame->data[0];

    if (!dst)
        return;

    for (int y = 0; y < avctx->height; y++) {
        memset(dst, 0, avctx->width * s->bpp);
        dst += frame->linesize[0];
    }
}

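/* Copy the image plane of src into dst, one stride of bytes per row. */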
static void copy_plane(AVCodecContext *avctx, AVFrame *src, AVFrame *dst)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *srcp = src->data[0];
    uint8_t *dstp = dst->data[0];

    for (int y = 0; y < avctx->height; y++) {
        memcpy(dstp, srcp, s->stride);
        srcp += src->linesize[0];
        dstp += dst->linesize[0];
    }
}

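/* (Re)allocate both reference frames and clear their image planes. */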
static int init_frames(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    int ret;

    av_frame_unref(s->frame1);
    av_frame_unref(s->frame2);
    if ((ret = ff_get_buffer(avctx, s->frame1, 0)) < 0)
        return ret;

    if ((ret = ff_get_buffer(avctx, s->frame2, 0)) < 0)
        return ret;

    clear_plane(avctx, s->frame2);
    clear_plane(avctx, s->frame1);

    return 0;
}

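/* FINT/INIT chunk: parse frame dimensions and bit depth, choose the pixel
 * format, reallocate both reference frames and, for 8 bpp content, read the
 * palette. Without the 0x65 marker the reference frames are just cleared. */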
static int decode_fint(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned w, h, fmt;
    int ret;

    if (bytestream2_peek_le32(gb) != 0x65) {
        if (!s->frame2->data[0] || !s->frame1->data[0])
            return AVERROR_INVALIDDATA;

        clear_plane(avctx, s->frame2);
        clear_plane(avctx, s->frame1);
        return 0;
    }
    if (bytestream2_get_bytes_left(gb) < 72)
        return AVERROR_INVALIDDATA;

    bytestream2_skip(gb, 8);
    w   = bytestream2_get_le32(gb);
    h   = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 30);
    fmt = bytestream2_get_le16(gb);
    bytestream2_skip(gb, 24);

    switch (fmt) {
    case  8: s->stride = FFALIGN(w, 4);
             s->bpp    = 1;
             fmt       = AV_PIX_FMT_PAL8; break;
    case 16: s->stride = w * 2;
             s->bpp    = 2;
             fmt       = AV_PIX_FMT_RGB555LE; break;
    case 32: s->stride = w * 4;
             s->bpp    = 4;
             fmt       = AV_PIX_FMT_BGR0; break;
    default: return AVERROR_INVALIDDATA;
    }

    ret = ff_set_dimensions(avctx, w, h);
    if (ret < 0)
        return ret;
    avctx->width   = w;
    avctx->height  = h;
    avctx->pix_fmt = fmt;

    ret = init_frames(avctx);
    if (ret < 0)
        return ret;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        uint32_t *pal = (uint32_t *)s->frame2->data[1];

        for (int i = 0; i < 256; i++)
            pal[i] = bytestream2_get_le32(gb) | 0xFF000000u;
    }

    return 0;
}

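/* Inflate up to `size` packet bytes into the s->delta scratch buffer,
 * which is grown to hold uncompressed_size bytes. */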
static int decode_zlib(AVCodecContext *avctx, AVPacket *avpkt,
                       unsigned size, unsigned uncompressed_size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int zret;

    zret = inflateReset(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate reset error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    av_fast_padded_malloc(&s->delta, &s->delta_size, uncompressed_size);
    if (!s->delta)
        return AVERROR(ENOMEM);

    s->zstream.next_in  = avpkt->data + bytestream2_tell(gb);
    s->zstream.avail_in = FFMIN(size, bytestream2_get_bytes_left(gb));

    s->zstream.next_out  = s->delta;
    s->zstream.avail_out = s->delta_size;

    zret = inflate(&s->zstream, Z_FINISH);
    if (zret != Z_STREAM_END) {
        av_log(avctx, AV_LOG_ERROR,
               "Inflate failed with return code: %d.\n", zret);
        return AVERROR_INVALIDDATA;
    }

    return 0;
}

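/* MOVE chunk: a list of 16-byte move records, optionally zlib-compressed,
 * describing rectangle copy, clear and move operations on the two
 * reference frames. */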
static int decode_move(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext mc;
    unsigned pos, compression, nb_moves;
    unsigned uncompressed_size;
    int ret;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    nb_moves = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 8);
    compression = bytestream2_get_le32(gb);

    if (nb_moves > INT32_MAX / 16 || nb_moves > avctx->width * avctx->height)
        return AVERROR_INVALIDDATA;

    uncompressed_size = 16 * nb_moves;

    if (compression == 1) {
        ret = decode_zlib(avctx, avpkt,
                          size - (bytestream2_tell(gb) - pos),
                          uncompressed_size);
        if (ret < 0)
            return ret;
        bytestream2_init(&mc, s->delta, uncompressed_size);
    } else if (compression == 0) {
        bytestream2_init(&mc, avpkt->data + bytestream2_tell(gb),
                         size - (bytestream2_tell(gb) - pos));
    } else if (compression == 2) {
        avpriv_request_sample(avctx, "compression %d", compression);
        return AVERROR_PATCHWELCOME;
    } else {
        return AVERROR_INVALIDDATA;
    }

    if (bytestream2_get_bytes_left(&mc) < uncompressed_size)
        return AVERROR_INVALIDDATA;

    for (int i = 0; i < nb_moves; i++) {
        int type, start_x, start_y, end_x, end_y, mov_x, mov_y;
        uint8_t *e2, *b1, *b2;
        int w, h;

        type    = bytestream2_get_le16(&mc);
        start_x = bytestream2_get_le16(&mc);
        start_y = bytestream2_get_le16(&mc);
        end_x   = bytestream2_get_le16(&mc);
        end_y   = bytestream2_get_le16(&mc);
        mov_x   = bytestream2_get_le16(&mc);
        mov_y   = bytestream2_get_le16(&mc);
        bytestream2_skip(&mc, 2);

        if (start_x >= avctx->width || start_y >= avctx->height ||
            end_x >= avctx->width || end_y >= avctx->height ||
            mov_x >= avctx->width || mov_y >= avctx->height) {
            continue;
        }

        if (start_x >= end_x || start_y >= end_y)
            continue;

        w = end_x - start_x;
        h = end_y - start_y;

        if (mov_x + w > avctx->width || mov_y + h > avctx->height)
            continue;

        if (!s->frame2->data[0] || !s->frame1->data[0])
            return AVERROR_INVALIDDATA;

        b1 = s->frame1->data[0] + s->frame1->linesize[0] * (start_y + h - 1) + start_x * s->bpp;
        b2 = s->frame2->data[0] + s->frame2->linesize[0] * (start_y + h - 1) + start_x * s->bpp;
        e2 = s->frame2->data[0] + s->frame2->linesize[0] * (mov_y + h - 1) + mov_x * s->bpp;

        if (type == 2) {
            for (int j = 0; j < h; j++) {
                memcpy(b1, b2, w * s->bpp);
                b1 -= s->frame1->linesize[0];
                b2 -= s->frame2->linesize[0];
            }
        } else if (type == 1) {
            for (int j = 0; j < h; j++) {
                memset(b2, 0, w * s->bpp);
                b2 -= s->frame2->linesize[0];
            }
        } else if (type == 0) {
            uint8_t *buffer;

            av_fast_padded_malloc(&s->delta, &s->delta_size, w * h * s->bpp);
            buffer = s->delta;
            if (!buffer)
                return AVERROR(ENOMEM);

            for (int j = 0; j < h; j++) {
                memcpy(buffer + j * w * s->bpp, e2, w * s->bpp);
                e2 -= s->frame2->linesize[0];
            }

            for (int j = 0; j < h; j++) {
                memcpy(b2, buffer + j * w * s->bpp, w * s->bpp);
                b2 -= s->frame2->linesize[0];
            }
        } else {
            return AVERROR_INVALIDDATA;
        }
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

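/* Advance to the next row (bottom-up) once cx has covered the full
 * rectangle width; consumes one unit of the current run length. */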
#define NEXT_LINE                         \
    if (cx >= w * s->bpp) {               \
        cx = 0;                           \
        cy--;                             \
        b1 -= s->frame1->linesize[0];     \
        b2 -= s->frame2->linesize[0];     \
    }                                     \
    len--;

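/* DLTA chunk: run-length coded per-byte/per-dword updates applied bottom-up
 * to a rectangle of the two reference frames. */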
static int decode_dlta(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext dc;
    unsigned uncompressed_size, pos;
    unsigned x, y, w, h;
    int ret, cx, cy, compression;
    uint8_t *b1, *b2;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 12);
    uncompressed_size = bytestream2_get_le32(gb);
    x = bytestream2_get_le32(gb);
    y = bytestream2_get_le32(gb);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);

    if (x >= avctx->width || y >= avctx->height ||
        w > avctx->width || h > avctx->height)
        return AVERROR_INVALIDDATA;

    if (x + w > avctx->width || y + h > avctx->height)
        return AVERROR_INVALIDDATA;

    bytestream2_skip(gb, 4);
    compression = bytestream2_get_le32(gb);

    if (compression == 1) {
        if (w * h * s->bpp * 3 < uncompressed_size)
            return AVERROR_INVALIDDATA;
        ret = decode_zlib(avctx, avpkt, size, uncompressed_size);
        if (ret < 0)
            return ret;
        bytestream2_init(&dc, s->delta, uncompressed_size);
    } else if (compression == 0) {
        if (bytestream2_get_bytes_left(gb) < uncompressed_size)
            return AVERROR_INVALIDDATA;
        bytestream2_init(&dc, avpkt->data + bytestream2_tell(gb),
                         uncompressed_size);
    } else if (compression == 2) {
        avpriv_request_sample(avctx, "compression %d", compression);
        return AVERROR_PATCHWELCOME;
    } else {
        return AVERROR_INVALIDDATA;
    }

    if (!s->frame2->data[0] || !s->frame1->data[0])
        return AVERROR_INVALIDDATA;

    b1 = s->frame1->data[0] + s->frame1->linesize[0] * (y + h - 1) + x * s->bpp;
    b2 = s->frame2->data[0] + s->frame2->linesize[0] * (y + h - 1) + x * s->bpp;
    cx = 0, cy = h;
    while (bytestream2_get_bytes_left(&dc) > 0) {
        int type = bytestream2_get_byte(&dc);
        int len  = bytestream2_get_byte(&dc);
        unsigned fill;

        switch (type) {
        case 1:
            while (len > 0 && cy > 0) {
                cx++;
                NEXT_LINE
            }
            break;
        case 2:
            while (len > 0 && cy > 0) {
                int v0 = b1[cx];
                int v1 = b2[cx];

                b2[cx] = v0;
                b1[cx] = v1;
                cx++;
                NEXT_LINE
            }
            break;
        case 3:
            while (len > 0 && cy > 0) {
                fill = bytestream2_get_byte(&dc);
                b1[cx] = b2[cx];
                b2[cx] = fill;
                cx++;
                NEXT_LINE
            }
            break;
        case 4:
            fill = bytestream2_get_byte(&dc);
            while (len > 0 && cy > 0) {
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx++;
                NEXT_LINE
            }
            break;
        case 7:
            fill = bytestream2_get_le32(&dc);
            while (len > 0 && cy > 0) {
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx += 4;
                NEXT_LINE
            }
            break;
        case 10:
            while (len > 0 && cy > 0) {
                cx += 4;
                NEXT_LINE
            }
            break;
        case 12:
            while (len > 0 && cy > 0) {
                unsigned v0, v1;

                v0 = AV_RL32(b2 + cx);
                v1 = AV_RL32(b1 + cx);
                AV_WL32(b2 + cx, v1);
                AV_WL32(b1 + cx, v0);
                cx += 4;
                NEXT_LINE
            }
            break;
        case 13:
            while (len > 0 && cy > 0) {
                fill = bytestream2_get_le32(&dc);
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx += 4;
                NEXT_LINE
            }
            break;
        default:
            avpriv_request_sample(avctx, "runlen %d", type);
            return AVERROR_INVALIDDATA;
        }
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

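/* KFRM chunk: zlib-compressed keyframe data; the stream first fills frame2
 * and then frame1, row by row from the bottom. */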
static int decode_kfrm(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    uint8_t *dst;
    unsigned pos;
    int zret, ret;

    pos = bytestream2_tell(gb);
    if (bytestream2_peek_le32(gb) == 0x65) {
        ret = decode_fint(avctx, avpkt, size);
        if (ret < 0)
            return ret;
    }

    if (!s->frame2->data[0])
        return AVERROR_INVALIDDATA;

    zret = inflateReset(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate reset error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    s->zstream.next_in  = avpkt->data + bytestream2_tell(gb);
    s->zstream.avail_in = bytestream2_get_bytes_left(gb);

    dst = s->frame2->data[0] + (avctx->height - 1) * s->frame2->linesize[0];
    for (int i = 0; i < avctx->height; i++) {
        s->zstream.next_out  = dst;
        s->zstream.avail_out = s->stride;

        zret = inflate(&s->zstream, Z_SYNC_FLUSH);
        if (zret != Z_OK && zret != Z_STREAM_END) {
            av_log(avctx, AV_LOG_ERROR,
                   "Inflate failed with return code: %d.\n", zret);
            return AVERROR_INVALIDDATA;
        }

        dst -= s->frame2->linesize[0];
    }

    dst = s->frame1->data[0] + (avctx->height - 1) * s->frame1->linesize[0];
    for (int i = 0; i < avctx->height; i++) {
        s->zstream.next_out  = dst;
        s->zstream.avail_out = s->stride;

        zret = inflate(&s->zstream, Z_SYNC_FLUSH);
        if (zret != Z_OK && zret != Z_STREAM_END) {
            av_log(avctx, AV_LOG_ERROR,
                   "Inflate failed with return code: %d.\n", zret);
            return AVERROR_INVALIDDATA;
        }

        dst -= s->frame1->linesize[0];
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

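/* MOUS chunk: zlib-compressed cursor bitmap, 3 bytes per pixel. */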
static int decode_mous(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned w, h, pos, uncompressed_size;
    int ret;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 12);
    uncompressed_size = bytestream2_get_le32(gb);

    if (w > avctx->width || h > avctx->height)
        return AVERROR_INVALIDDATA;

    if (uncompressed_size != 3 * w * h)
        return AVERROR_INVALIDDATA;

    av_fast_padded_malloc(&s->cursor, &s->cursor_size, uncompressed_size);
    if (!s->cursor)
        return AVERROR(ENOMEM);

    ret = decode_zlib(avctx, avpkt,
                      size - (bytestream2_tell(gb) - pos),
                      uncompressed_size);
    if (ret < 0)
        return ret;
    memcpy(s->cursor, s->delta, uncompressed_size);

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    s->cursor_w = w;
    s->cursor_h = h;

    return 0;
}

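/* MPOS chunk: update the cursor position. */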
static int decode_mpos(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned pos;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    s->cursor_x = bytestream2_get_le32(gb);
    s->cursor_y = bytestream2_get_le32(gb);

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

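/* Blit the cursor into the output frame; pixels equal to the first pixel of
 * the cursor bitmap are treated as transparent. */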
static void draw_cursor(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *dst, *pal;

    if (!s->cursor)
        return;

    if (s->cursor_x >= avctx->width || s->cursor_y >= avctx->height)
        return;

    if (s->cursor_x + s->cursor_w > avctx->width ||
        s->cursor_y + s->cursor_h > avctx->height)
        return;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        pal = s->frame->data[1];
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];
                int best = INT_MAX;
                int index = 0;
                int dist;

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + (s->cursor_x + j);
                for (int k = 0; k < 256; k++) {
                    int pr = pal[k * 4 + 0];
                    int pg = pal[k * 4 + 1];
                    int pb = pal[k * 4 + 2];

                    dist = FFABS(cr - pr) + FFABS(cg - pg) + FFABS(cb - pb);
                    if (dist < best) {
                        best = dist;
                        index = k;
                    }
                }
                dst[0] = index;
            }
        }
    } else if (avctx->pix_fmt == AV_PIX_FMT_RGB555LE) {
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                cr >>= 3; cg >>= 3; cb >>= 3;
                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + 2 * (s->cursor_x + j);
                AV_WL16(dst, cr | cg << 5 | cb << 10);
            }
        }
    } else if (avctx->pix_fmt == AV_PIX_FMT_BGR0) {
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + 4 * (s->cursor_x + j);
                dst[0] = cb;
                dst[1] = cg;
                dst[2] = cr;
            }
        }
    }
}

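/* Top-level packet decoder: walk the chunk list, update the reference
 * frames, then output a copy of frame2 with the cursor drawn on top. */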
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame,
                        AVPacket *avpkt)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int ret, intra = 0;
    AVFrame *frame = data;

    bytestream2_init(gb, avpkt->data, avpkt->size);

    if (bytestream2_peek_le32(gb) == EMPT)
        return avpkt->size;

    s->frame = frame;

    while (bytestream2_get_bytes_left(gb) > 0) {
        unsigned type, size = 0;

        if (bytestream2_get_bytes_left(gb) < 8)
            return AVERROR_INVALIDDATA;

        type = bytestream2_get_le32(gb);
        if (type == KBND || type == BNDL) {
            intra = type == KBND;
            type = bytestream2_get_le32(gb);
        }

        size = bytestream2_get_le32(gb);
        if (bytestream2_get_bytes_left(gb) < size)
            return AVERROR_INVALIDDATA;

        switch (type) {
        case FINT:
        case INIT:
            ret = decode_fint(avctx, avpkt, size);
            break;
        case KFRM:
            ret = decode_kfrm(avctx, avpkt, size);
            break;
        case DLTA:
            ret = decode_dlta(avctx, avpkt, size);
            break;
        case MOVE:
            ret = decode_move(avctx, avpkt, size);
            break;
        case MOUS:
            ret = decode_mous(avctx, avpkt, size);
            break;
        case MPOS:
            ret = decode_mpos(avctx, avpkt, size);
            break;
        default:
            bytestream2_skip(gb, size);
        }

        if (ret < 0)
            return ret;
    }

    if (!s->frame2->data[0] || !s->frame1->data[0])
        return AVERROR_INVALIDDATA;

    if ((ret = ff_get_buffer(avctx, s->frame, 0)) < 0)
        return ret;

    copy_plane(avctx, s->frame2, s->frame);
    if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(s->frame->data[1], s->frame2->data[1], 1024);
    if (!s->skip_cursor)
        draw_cursor(avctx);

    s->frame->key_frame = intra;
    s->frame->pict_type = intra ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;

    *got_frame = 1;

    return avpkt->size;
}

static av_cold int decode_init(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    int zret;

    s->zstream.zalloc = Z_NULL;
    s->zstream.zfree  = Z_NULL;
    s->zstream.opaque = Z_NULL;
    zret = inflateInit(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate init error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    s->frame1 = av_frame_alloc();
    s->frame2 = av_frame_alloc();
    if (!s->frame1 || !s->frame2)
        return AVERROR(ENOMEM);

    return 0;
}

static av_cold int decode_close(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    av_freep(&s->cursor);
    s->cursor_size = 0;
    av_freep(&s->delta);
    s->delta_size = 0;
    av_frame_free(&s->frame1);
    av_frame_free(&s->frame2);
    inflateEnd(&s->zstream);

    return 0;
}

static void decode_flush(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    clear_plane(avctx, s->frame1);
    clear_plane(avctx, s->frame2);
}

static const AVOption options[] = {
{ "skip_cursor", "skip the cursor", offsetof(RASCContext, skip_cursor), AV_OPT_TYPE_BOOL, {.i64 = 0 }, 0, 1, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM },
{ NULL },
};

static const AVClass rasc_decoder_class = {
    .class_name = "rasc decoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_rasc_decoder = {
    .name           = "rasc",
    .long_name      = NULL_IF_CONFIG_SMALL("RemotelyAnywhere Screen Capture"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_RASC,
    .priv_data_size = sizeof(RASCContext),
    .init           = decode_init,
    .close          = decode_close,
    .decode         = decode_frame,
    .flush          = decode_flush,
    .capabilities   = AV_CODEC_CAP_DR1,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE |
                      FF_CODEC_CAP_INIT_CLEANUP,
    .priv_class     = &rasc_decoder_class,
};