/*
 * RemotelyAnywhere Screen Capture decoder
 *
 * Copyright (c) 2018 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bytestream.h"
#include "internal.h"

#include <zlib.h>

#define KBND MKTAG('K', 'B', 'N', 'D')
#define FINT MKTAG('F', 'I', 'N', 'T')
#define INIT MKTAG('I', 'N', 'I', 'T')
#define BNDL MKTAG('B', 'N', 'D', 'L')
#define KFRM MKTAG('K', 'F', 'R', 'M')
#define DLTA MKTAG('D', 'L', 'T', 'A')
#define MOUS MKTAG('M', 'O', 'U', 'S')
#define MPOS MKTAG('M', 'P', 'O', 'S')
#define MOVE MKTAG('M', 'O', 'V', 'E')
#define EMPT MKTAG('E', 'M', 'P', 'T')

typedef struct RASCContext {
    AVClass        *class;
    int             skip_cursor;
    GetByteContext  gb;
    uint8_t        *delta;
    int             delta_size;
    uint8_t        *cursor;
    int             cursor_size;
    unsigned        cursor_w;
    unsigned        cursor_h;
    unsigned        cursor_x;
    unsigned        cursor_y;
    int             stride;
    int             bpp;
    z_stream        zstream;
    AVFrame        *frame;
    AVFrame        *frame1;
    AVFrame        *frame2;
} RASCContext;

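/* Zero every line of the frame's first data plane. */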
static void clear_plane(AVCodecContext *avctx, AVFrame *frame)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *dst = frame->data[0];

    for (int y = 0; y < avctx->height; y++) {
        memset(dst, 0, avctx->width * s->bpp);
        dst += frame->linesize[0];
    }
}

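/* Copy the visible part of the first data plane from src to dst, line by line. */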
static void copy_plane(AVCodecContext *avctx, AVFrame *src, AVFrame *dst)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *srcp = src->data[0];
    uint8_t *dstp = dst->data[0];

    for (int y = 0; y < avctx->height; y++) {
        memcpy(dstp, srcp, s->stride);
        srcp += src->linesize[0];
        dstp += dst->linesize[0];
    }
}

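/* (Re)allocate the two reference frames and clear them. */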
static int init_frames(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    int ret;

    av_frame_unref(s->frame1);
    av_frame_unref(s->frame2);
    if ((ret = ff_get_buffer(avctx, s->frame1, 0)) < 0)
        return ret;

    if ((ret = ff_get_buffer(avctx, s->frame2, 0)) < 0)
        return ret;

    clear_plane(avctx, s->frame2);
    clear_plane(avctx, s->frame1);

    return 0;
}

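/*
 * FINT/INIT chunk: parse frame dimensions and bit depth, select the
 * pixel format, (re)allocate the reference frames and, for 8-bit
 * content, read the palette.
 */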
static int decode_fint(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned w, h, fmt;
    int ret;

    if (bytestream2_peek_le32(gb) != 0x65) {
        if (!s->frame2->data[0] || !s->frame1->data[0])
            return AVERROR_INVALIDDATA;

        clear_plane(avctx, s->frame2);
        clear_plane(avctx, s->frame1);
        return 0;
    }
    if (bytestream2_get_bytes_left(gb) < 72)
        return AVERROR_INVALIDDATA;

    bytestream2_skip(gb, 8);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 30);
    fmt = bytestream2_get_le16(gb);
    bytestream2_skip(gb, 24);

    switch (fmt) {
    case  8: s->stride = FFALIGN(w, 4);
             s->bpp = 1;
             fmt = AV_PIX_FMT_PAL8; break;
    case 16: s->stride = w * 2;
             s->bpp = 2;
             fmt = AV_PIX_FMT_RGB555LE; break;
    case 32: s->stride = w * 4;
             s->bpp = 4;
             fmt = AV_PIX_FMT_BGR0; break;
    default: return AVERROR_INVALIDDATA;
    }

    ret = ff_set_dimensions(avctx, w, h);
    if (ret < 0)
        return ret;
    avctx->width = w;
    avctx->height = h;
    avctx->pix_fmt = fmt;

    ret = init_frames(avctx);
    if (ret < 0)
        return ret;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        uint32_t *pal = (uint32_t *)s->frame2->data[1];

        for (int i = 0; i < 256; i++)
            pal[i] = bytestream2_get_le32(gb) | 0xFF000000u;
    }

    return 0;
}

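/* Inflate the next chunk payload into the s->delta scratch buffer. */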
static int decode_zlib(AVCodecContext *avctx, AVPacket *avpkt,
                       unsigned size, unsigned uncompressed_size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int zret;

    zret = inflateReset(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate reset error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    av_fast_padded_malloc(&s->delta, &s->delta_size, uncompressed_size);
    if (!s->delta)
        return AVERROR(ENOMEM);

    s->zstream.next_in  = avpkt->data + bytestream2_tell(gb);
    s->zstream.avail_in = FFMIN(size, bytestream2_get_bytes_left(gb));

    s->zstream.next_out  = s->delta;
    s->zstream.avail_out = s->delta_size;

    zret = inflate(&s->zstream, Z_FINISH);
    if (zret != Z_STREAM_END) {
        av_log(avctx, AV_LOG_ERROR,
               "Inflate failed with return code: %d.\n", zret);
        return AVERROR_INVALIDDATA;
    }

    return 0;
}

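/*
 * MOVE chunk: a list of 16-byte records (optionally zlib-compressed),
 * each copying a rectangle from frame2 to frame1, clearing it, or
 * relocating it within frame2.
 */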
static int decode_move(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext mc;
    unsigned pos, compression, nb_moves;
    unsigned uncompressed_size;
    int ret;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    nb_moves = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 8);
    compression = bytestream2_get_le32(gb);

    if (nb_moves > INT32_MAX / 16 || nb_moves > avctx->width * avctx->height)
        return AVERROR_INVALIDDATA;

    uncompressed_size = 16 * nb_moves;

    if (compression == 1) {
        ret = decode_zlib(avctx, avpkt,
                          size - (bytestream2_tell(gb) - pos),
                          uncompressed_size);
        if (ret < 0)
            return ret;
        bytestream2_init(&mc, s->delta, uncompressed_size);
    } else if (compression == 0) {
        bytestream2_init(&mc, avpkt->data + bytestream2_tell(gb),
                         size - (bytestream2_tell(gb) - pos));
    } else if (compression == 2) {
        avpriv_request_sample(avctx, "compression %d", compression);
        return AVERROR_PATCHWELCOME;
    } else {
        return AVERROR_INVALIDDATA;
    }

    if (bytestream2_get_bytes_left(&mc) < uncompressed_size)
        return AVERROR_INVALIDDATA;

    for (int i = 0; i < nb_moves; i++) {
        int type, start_x, start_y, end_x, end_y, mov_x, mov_y;
        uint8_t *e2, *b1, *b2;
        int w, h;

        type    = bytestream2_get_le16(&mc);
        start_x = bytestream2_get_le16(&mc);
        start_y = bytestream2_get_le16(&mc);
        end_x   = bytestream2_get_le16(&mc);
        end_y   = bytestream2_get_le16(&mc);
        mov_x   = bytestream2_get_le16(&mc);
        mov_y   = bytestream2_get_le16(&mc);
        bytestream2_skip(&mc, 2);

        if (start_x >= avctx->width || start_y >= avctx->height ||
            end_x >= avctx->width || end_y >= avctx->height ||
            mov_x >= avctx->width || mov_y >= avctx->height) {
            continue;
        }

        if (start_x >= end_x || start_y >= end_y)
            continue;

        w = end_x - start_x;
        h = end_y - start_y;

        if (mov_x + w > avctx->width || mov_y + h > avctx->height)
            continue;

        if (!s->frame2->data[0] || !s->frame1->data[0])
            return AVERROR_INVALIDDATA;

        b1 = s->frame1->data[0] + s->frame1->linesize[0] * (start_y + h - 1) + start_x * s->bpp;
        b2 = s->frame2->data[0] + s->frame2->linesize[0] * (start_y + h - 1) + start_x * s->bpp;
        e2 = s->frame2->data[0] + s->frame2->linesize[0] * (mov_y + h - 1) + mov_x * s->bpp;

        if (type == 2) {
            for (int j = 0; j < h; j++) {
                memcpy(b1, b2, w * s->bpp);
                b1 -= s->frame1->linesize[0];
                b2 -= s->frame2->linesize[0];
            }
        } else if (type == 1) {
            for (int j = 0; j < h; j++) {
                memset(b2, 0, w * s->bpp);
                b2 -= s->frame2->linesize[0];
            }
        } else if (type == 0) {
            uint8_t *buffer;

            av_fast_padded_malloc(&s->delta, &s->delta_size, w * h * s->bpp);
            buffer = s->delta;
            if (!buffer)
                return AVERROR(ENOMEM);

            for (int j = 0; j < h; j++) {
                memcpy(buffer + j * w * s->bpp, e2, w * s->bpp);
                e2 -= s->frame2->linesize[0];
            }

            for (int j = 0; j < h; j++) {
                memcpy(b2, buffer + j * w * s->bpp, w * s->bpp);
                b2 -= s->frame2->linesize[0];
            }
        } else {
            return AVERROR_INVALIDDATA;
        }
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

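/* Consume one run-length unit; once a full row has been covered, wrap to the next line up in memory. */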
#define NEXT_LINE                         \
    if (cx >= w * s->bpp) {               \
        cx = 0;                           \
        cy--;                             \
        b1 -= s->frame1->linesize[0];     \
        b2 -= s->frame2->linesize[0];     \
    }                                     \
    len--;

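/*
 * DLTA chunk: run-length coded update of a rectangle. Each (type, len)
 * pair skips pixels, swaps data between the two reference frames, or
 * moves the previous frame2 contents into frame1 while writing new
 * values into frame2, operating on bytes or 32-bit words.
 */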
static int decode_dlta(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext dc;
    unsigned uncompressed_size, pos;
    unsigned x, y, w, h;
    int ret, cx, cy, compression;
    uint8_t *b1, *b2;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 12);
    uncompressed_size = bytestream2_get_le32(gb);
    x = bytestream2_get_le32(gb);
    y = bytestream2_get_le32(gb);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);

    if (x >= avctx->width || y >= avctx->height ||
        w > avctx->width || h > avctx->height)
        return AVERROR_INVALIDDATA;

    if (x + w > avctx->width || y + h > avctx->height)
        return AVERROR_INVALIDDATA;

    bytestream2_skip(gb, 4);
    compression = bytestream2_get_le32(gb);

    if (compression == 1) {
        if (w * h * s->bpp * 3 < uncompressed_size)
            return AVERROR_INVALIDDATA;
        ret = decode_zlib(avctx, avpkt, size, uncompressed_size);
        if (ret < 0)
            return ret;
        bytestream2_init(&dc, s->delta, uncompressed_size);
    } else if (compression == 0) {
        if (bytestream2_get_bytes_left(gb) < uncompressed_size)
            return AVERROR_INVALIDDATA;
        bytestream2_init(&dc, avpkt->data + bytestream2_tell(gb),
                         uncompressed_size);
    } else if (compression == 2) {
        avpriv_request_sample(avctx, "compression %d", compression);
        return AVERROR_PATCHWELCOME;
    } else {
        return AVERROR_INVALIDDATA;
    }

    if (!s->frame2->data[0] || !s->frame1->data[0])
        return AVERROR_INVALIDDATA;

    b1 = s->frame1->data[0] + s->frame1->linesize[0] * (y + h - 1) + x * s->bpp;
    b2 = s->frame2->data[0] + s->frame2->linesize[0] * (y + h - 1) + x * s->bpp;
    cx = 0, cy = h;
    while (bytestream2_get_bytes_left(&dc) > 0) {
        int type = bytestream2_get_byte(&dc);
        int len = bytestream2_get_byte(&dc);
        unsigned fill;

        switch (type) {
        case 1:
            while (len > 0 && cy > 0) {
                cx++;
                NEXT_LINE
            }
            break;
        case 2:
            while (len > 0 && cy > 0) {
                int v0 = b1[cx];
                int v1 = b2[cx];

                b2[cx] = v0;
                b1[cx] = v1;
                cx++;
                NEXT_LINE
            }
            break;
        case 3:
            while (len > 0 && cy > 0) {
                fill = bytestream2_get_byte(&dc);
                b1[cx] = b2[cx];
                b2[cx] = fill;
                cx++;
                NEXT_LINE
            }
            break;
        case 4:
            fill = bytestream2_get_byte(&dc);
            while (len > 0 && cy > 0) {
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx++;
                NEXT_LINE
            }
            break;
        case 7:
            fill = bytestream2_get_le32(&dc);
            while (len > 0 && cy > 0) {
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx += 4;
                NEXT_LINE
            }
            break;
        case 10:
            while (len > 0 && cy > 0) {
                cx += 4;
                NEXT_LINE
            }
            break;
        case 12:
            while (len > 0 && cy > 0) {
                unsigned v0, v1;

                v0 = AV_RL32(b2 + cx);
                v1 = AV_RL32(b1 + cx);
                AV_WL32(b2 + cx, v1);
                AV_WL32(b1 + cx, v0);
                cx += 4;
                NEXT_LINE
            }
            break;
        case 13:
            while (len > 0 && cy > 0) {
                fill = bytestream2_get_le32(&dc);
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx += 4;
                NEXT_LINE
            }
            break;
        default:
            avpriv_request_sample(avctx, "runlen %d", type);
            return AVERROR_INVALIDDATA;
        }
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

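/*
 * KFRM chunk: keyframe. May begin with an embedded FINT block, followed
 * by a zlib stream that refills both reference frames line by line,
 * bottom-up.
 */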
static int decode_kfrm(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    uint8_t *dst;
    unsigned pos;
    int zret, ret;

    pos = bytestream2_tell(gb);
    if (bytestream2_peek_le32(gb) == 0x65) {
        ret = decode_fint(avctx, avpkt, size);
        if (ret < 0)
            return ret;
    }

    if (!s->frame2->data[0])
        return AVERROR_INVALIDDATA;

    zret = inflateReset(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate reset error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    s->zstream.next_in  = avpkt->data + bytestream2_tell(gb);
    s->zstream.avail_in = bytestream2_get_bytes_left(gb);

    dst = s->frame2->data[0] + (avctx->height - 1) * s->frame2->linesize[0];
    for (int i = 0; i < avctx->height; i++) {
        s->zstream.next_out  = dst;
        s->zstream.avail_out = s->stride;

        zret = inflate(&s->zstream, Z_SYNC_FLUSH);
        if (zret != Z_OK && zret != Z_STREAM_END) {
            av_log(avctx, AV_LOG_ERROR,
                   "Inflate failed with return code: %d.\n", zret);
            return AVERROR_INVALIDDATA;
        }

        dst -= s->frame2->linesize[0];
    }

    dst = s->frame1->data[0] + (avctx->height - 1) * s->frame1->linesize[0];
    for (int i = 0; i < avctx->height; i++) {
        s->zstream.next_out  = dst;
        s->zstream.avail_out = s->stride;

        zret = inflate(&s->zstream, Z_SYNC_FLUSH);
        if (zret != Z_OK && zret != Z_STREAM_END) {
            av_log(avctx, AV_LOG_ERROR,
                   "Inflate failed with return code: %d.\n", zret);
            return AVERROR_INVALIDDATA;
        }

        dst -= s->frame1->linesize[0];
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

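/* MOUS chunk: zlib-compressed cursor bitmap, three bytes per pixel. */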
static int decode_mous(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned w, h, pos, uncompressed_size;
    int ret;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 12);
    uncompressed_size = bytestream2_get_le32(gb);

    if (w > avctx->width || h > avctx->height)
        return AVERROR_INVALIDDATA;

    if (uncompressed_size != 3 * w * h)
        return AVERROR_INVALIDDATA;

    av_fast_padded_malloc(&s->cursor, &s->cursor_size, uncompressed_size);
    if (!s->cursor)
        return AVERROR(ENOMEM);

    ret = decode_zlib(avctx, avpkt,
                      size - (bytestream2_tell(gb) - pos),
                      uncompressed_size);
    if (ret < 0)
        return ret;
    memcpy(s->cursor, s->delta, uncompressed_size);

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    s->cursor_w = w;
    s->cursor_h = h;

    return 0;
}

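/* MPOS chunk: new cursor position. */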
static int decode_mpos(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned pos;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    s->cursor_x = bytestream2_get_le32(gb);
    s->cursor_y = bytestream2_get_le32(gb);

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

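/*
 * Paint the cursor into the output frame. The color of the cursor's first
 * pixel is treated as the transparent key; for PAL8 output each cursor
 * pixel is mapped to the nearest palette entry.
 */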
static void draw_cursor(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *dst, *pal;

    if (!s->cursor)
        return;

    if (s->cursor_x >= avctx->width || s->cursor_y >= avctx->height)
        return;

    if (s->cursor_x + s->cursor_w > avctx->width ||
        s->cursor_y + s->cursor_h > avctx->height)
        return;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        pal = s->frame->data[1];
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];
                int best = INT_MAX;
                int index = 0;
                int dist;

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + (s->cursor_x + j);
                for (int k = 0; k < 256; k++) {
                    int pr = pal[k * 4 + 0];
                    int pg = pal[k * 4 + 1];
                    int pb = pal[k * 4 + 2];

                    dist = FFABS(cr - pr) + FFABS(cg - pg) + FFABS(cb - pb);
                    if (dist < best) {
                        best = dist;
                        index = k;
                    }
                }
                dst[0] = index;
            }
        }
    } else if (avctx->pix_fmt == AV_PIX_FMT_RGB555LE) {
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                cr >>= 3; cg >>= 3; cb >>= 3;
                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + 2 * (s->cursor_x + j);
                AV_WL16(dst, cr | cg << 5 | cb << 10);
            }
        }
    } else if (avctx->pix_fmt == AV_PIX_FMT_BGR0) {
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + 4 * (s->cursor_x + j);
                dst[0] = cb;
                dst[1] = cg;
                dst[2] = cr;
            }
        }
    }
}

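/*
 * Packet entry point: walk the chunk list, dispatch on each chunk tag,
 * then emit a copy of frame2 with the cursor drawn on top.
 */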
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame,
                        AVPacket *avpkt)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int ret = 0, intra = 0;
    AVFrame *frame = data;

    bytestream2_init(gb, avpkt->data, avpkt->size);

    if (bytestream2_peek_le32(gb) == EMPT)
        return avpkt->size;

    s->frame = frame;

    while (bytestream2_get_bytes_left(gb) > 0) {
        unsigned type, size = 0;

        if (bytestream2_get_bytes_left(gb) < 8)
            return AVERROR_INVALIDDATA;

        type = bytestream2_get_le32(gb);
        if (type == KBND || type == BNDL) {
            intra = type == KBND;
            type = bytestream2_get_le32(gb);
        }

        size = bytestream2_get_le32(gb);
        if (bytestream2_get_bytes_left(gb) < size)
            return AVERROR_INVALIDDATA;

        switch (type) {
        case FINT:
        case INIT:
            ret = decode_fint(avctx, avpkt, size);
            break;
        case KFRM:
            ret = decode_kfrm(avctx, avpkt, size);
            break;
        case DLTA:
            ret = decode_dlta(avctx, avpkt, size);
            break;
        case MOVE:
            ret = decode_move(avctx, avpkt, size);
            break;
        case MOUS:
            ret = decode_mous(avctx, avpkt, size);
            break;
        case MPOS:
            ret = decode_mpos(avctx, avpkt, size);
            break;
        default:
            bytestream2_skip(gb, size);
        }

        if (ret < 0)
            return ret;
    }

    if (!s->frame2->data[0] || !s->frame1->data[0])
        return AVERROR_INVALIDDATA;

    if ((ret = ff_get_buffer(avctx, s->frame, 0)) < 0)
        return ret;

    copy_plane(avctx, s->frame2, s->frame);
    if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(s->frame->data[1], s->frame2->data[1], 1024);
    if (!s->skip_cursor)
        draw_cursor(avctx);

    s->frame->key_frame = intra;
    s->frame->pict_type = intra ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;

    *got_frame = 1;

    return avpkt->size;
}

static av_cold int decode_init(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    int zret;

    s->zstream.zalloc = Z_NULL;
    s->zstream.zfree = Z_NULL;
    s->zstream.opaque = Z_NULL;
    zret = inflateInit(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate init error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    s->frame1 = av_frame_alloc();
    s->frame2 = av_frame_alloc();
    if (!s->frame1 || !s->frame2)
        return AVERROR(ENOMEM);

    return 0;
}

static av_cold int decode_close(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    av_freep(&s->cursor);
    s->cursor_size = 0;
    av_freep(&s->delta);
    s->delta_size = 0;
    av_frame_free(&s->frame1);
    av_frame_free(&s->frame2);
    inflateEnd(&s->zstream);

    return 0;
}

static void decode_flush(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    clear_plane(avctx, s->frame1);
    clear_plane(avctx, s->frame2);
}

static const AVOption options[] = {
{ "skip_cursor", "skip the cursor", offsetof(RASCContext, skip_cursor), AV_OPT_TYPE_BOOL, {.i64 = 0 }, 0, 1, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM },
{ NULL },
};

static const AVClass rasc_decoder_class = {
    .class_name = "rasc decoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_rasc_decoder = {
    .name           = "rasc",
    .long_name      = NULL_IF_CONFIG_SMALL("RemotelyAnywhere Screen Capture"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_RASC,
    .priv_data_size = sizeof(RASCContext),
    .init           = decode_init,
    .close          = decode_close,
    .decode         = decode_frame,
    .flush          = decode_flush,
    .capabilities   = AV_CODEC_CAP_DR1,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE |
                      FF_CODEC_CAP_INIT_CLEANUP,
    .priv_class     = &rasc_decoder_class,
};