rasc.c
/*
 * RemotelyAnywhere Screen Capture decoder
 *
 * Copyright (c) 2018 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bytestream.h"
#include "internal.h"

#include <zlib.h>

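/* FourCC chunk tags the decoder recognizes inside a packet. */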
#define KBND MKTAG('K', 'B', 'N', 'D')
#define FINT MKTAG('F', 'I', 'N', 'T')
#define INIT MKTAG('I', 'N', 'I', 'T')
#define BNDL MKTAG('B', 'N', 'D', 'L')
#define KFRM MKTAG('K', 'F', 'R', 'M')
#define DLTA MKTAG('D', 'L', 'T', 'A')
#define MOUS MKTAG('M', 'O', 'U', 'S')
#define MPOS MKTAG('M', 'P', 'O', 'S')
#define MOVE MKTAG('M', 'O', 'V', 'E')
#define EMPT MKTAG('E', 'M', 'P', 'T')

typedef struct RASCContext {
    AVClass        *class;
    int             skip_cursor;
    GetByteContext  gb;
    uint8_t        *delta;
    int             delta_size;
    uint8_t        *cursor;
    int             cursor_size;
    unsigned        cursor_w;
    unsigned        cursor_h;
    unsigned        cursor_x;
    unsigned        cursor_y;
    int             stride;
    int             bpp;
    z_stream        zstream;
    AVFrame        *frame;
    AVFrame        *frame1;
    AVFrame        *frame2;
} RASCContext;

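/* Zero the visible part of the first image plane of the given frame. */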
static void clear_plane(AVCodecContext *avctx, AVFrame *frame)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *dst = frame->data[0];

    for (int y = 0; y < avctx->height; y++) {
        memset(dst, 0, avctx->width * s->bpp);
        dst += frame->linesize[0];
    }
}

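/* Copy the image plane from src to dst, one stride-wide row at a time. */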
static void copy_plane(AVCodecContext *avctx, AVFrame *src, AVFrame *dst)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *srcp = src->data[0];
    uint8_t *dstp = dst->data[0];

    for (int y = 0; y < avctx->height; y++) {
        memcpy(dstp, srcp, s->stride);
        srcp += src->linesize[0];
        dstp += dst->linesize[0];
    }
}

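/* (Re)allocate the two internal reference frames and clear them. */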
static int init_frames(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    int ret;

    av_frame_unref(s->frame1);
    av_frame_unref(s->frame2);
    if ((ret = ff_get_buffer(avctx, s->frame1, 0)) < 0)
        return ret;

    if ((ret = ff_get_buffer(avctx, s->frame2, 0)) < 0)
        return ret;

    clear_plane(avctx, s->frame2);
    clear_plane(avctx, s->frame1);

    return 0;
}

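/* FINT/INIT chunk: read width, height and bit depth, set the pixel format and
 * reallocate the reference frames; for 8 bpp a 256-entry palette follows. */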
static int decode_fint(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned w, h, fmt;
    int ret;

    if (bytestream2_peek_le32(gb) != 0x65) {
        if (!s->frame2->data[0] || !s->frame1->data[0])
            return AVERROR_INVALIDDATA;

        clear_plane(avctx, s->frame2);
        clear_plane(avctx, s->frame1);
        return 0;
    }

    bytestream2_skip(gb, 8);
    w   = bytestream2_get_le32(gb);
    h   = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 30);
    fmt = bytestream2_get_le16(gb);
    bytestream2_skip(gb, 24);

    switch (fmt) {
    case  8: s->stride = FFALIGN(w, 4);
             s->bpp    = 1;
             fmt       = AV_PIX_FMT_PAL8; break;
    case 16: s->stride = w * 2;
             s->bpp    = 2;
             fmt       = AV_PIX_FMT_RGB555LE; break;
    case 32: s->stride = w * 4;
             s->bpp    = 4;
             fmt       = AV_PIX_FMT_BGR0; break;
    default: return AVERROR_INVALIDDATA;
    }

    ret = ff_set_dimensions(avctx, w, h);
    if (ret < 0)
        return ret;
    avctx->width   = w;
    avctx->height  = h;
    avctx->pix_fmt = fmt;

    ret = init_frames(avctx);
    if (ret < 0)
        return ret;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        uint32_t *pal = (uint32_t *)s->frame2->data[1];

        for (int i = 0; i < 256; i++)
            pal[i] = bytestream2_get_le32(gb) | 0xFF000000u;
    }

    return 0;
}

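/* Inflate a zlib-compressed payload from the current packet position into the
 * scratch buffer s->delta. */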
static int decode_zlib(AVCodecContext *avctx, AVPacket *avpkt,
                       unsigned size, unsigned uncompressed_size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int zret;

    zret = inflateReset(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate reset error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    av_fast_padded_malloc(&s->delta, &s->delta_size, uncompressed_size);
    if (!s->delta)
        return AVERROR(ENOMEM);

    s->zstream.next_in  = avpkt->data + bytestream2_tell(gb);
    s->zstream.avail_in = FFMIN(size, bytestream2_get_bytes_left(gb));

    s->zstream.next_out  = s->delta;
    s->zstream.avail_out = s->delta_size;

    zret = inflate(&s->zstream, Z_FINISH);
    if (zret != Z_STREAM_END) {
        av_log(avctx, AV_LOG_ERROR,
               "Inflate failed with return code: %d.\n", zret);
        return AVERROR_INVALIDDATA;
    }

    return 0;
}

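/* MOVE chunk: a (possibly zlib-compressed) list of 16-byte records, each
 * copying, clearing or relocating a rectangle of the reference frames. */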
static int decode_move(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext mc;
    unsigned pos, compression, nb_moves;
    unsigned uncompressed_size;
    int ret;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    nb_moves = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 8);
    compression = bytestream2_get_le32(gb);

    if (nb_moves > INT32_MAX / 16 || nb_moves > avctx->width * avctx->height)
        return AVERROR_INVALIDDATA;

    uncompressed_size = 16 * nb_moves;

    if (compression == 1) {
        ret = decode_zlib(avctx, avpkt,
                          size - (bytestream2_tell(gb) - pos),
                          uncompressed_size);
        if (ret < 0)
            return ret;
        bytestream2_init(&mc, s->delta, uncompressed_size);
    } else if (compression == 0) {
        bytestream2_init(&mc, avpkt->data + bytestream2_tell(gb),
                         size - (bytestream2_tell(gb) - pos));
    } else if (compression == 2) {
        avpriv_request_sample(avctx, "compression %d", compression);
        return AVERROR_PATCHWELCOME;
    } else {
        return AVERROR_INVALIDDATA;
    }

    if (bytestream2_get_bytes_left(&mc) < uncompressed_size)
        return AVERROR_INVALIDDATA;

    for (int i = 0; i < nb_moves; i++) {
        int type, start_x, start_y, end_x, end_y, mov_x, mov_y;
        uint8_t *e2, *b1, *b2;
        int w, h;

        type    = bytestream2_get_le16(&mc);
        start_x = bytestream2_get_le16(&mc);
        start_y = bytestream2_get_le16(&mc);
        end_x   = bytestream2_get_le16(&mc);
        end_y   = bytestream2_get_le16(&mc);
        mov_x   = bytestream2_get_le16(&mc);
        mov_y   = bytestream2_get_le16(&mc);
        bytestream2_skip(&mc, 2);

        if (start_x >= avctx->width || start_y >= avctx->height ||
            end_x >= avctx->width || end_y >= avctx->height ||
            mov_x >= avctx->width || mov_y >= avctx->height) {
            continue;
        }

        if (start_x >= end_x || start_y >= end_y)
            continue;

        w = end_x - start_x;
        h = end_y - start_y;

        if (mov_x + w > avctx->width || mov_y + h > avctx->height)
            continue;

        if (!s->frame2->data[0] || !s->frame1->data[0])
            return AVERROR_INVALIDDATA;

        b1 = s->frame1->data[0] + s->frame1->linesize[0] * (start_y + h - 1) + start_x * s->bpp;
        b2 = s->frame2->data[0] + s->frame2->linesize[0] * (start_y + h - 1) + start_x * s->bpp;
        e2 = s->frame2->data[0] + s->frame2->linesize[0] * (mov_y + h - 1) + mov_x * s->bpp;

        if (type == 2) {
            for (int j = 0; j < h; j++) {
                memcpy(b1, b2, w * s->bpp);
                b1 -= s->frame1->linesize[0];
                b2 -= s->frame2->linesize[0];
            }
        } else if (type == 1) {
            for (int j = 0; j < h; j++) {
                memset(b2, 0, w * s->bpp);
                b2 -= s->frame2->linesize[0];
            }
        } else if (type == 0) {
            uint8_t *buffer;

            av_fast_padded_malloc(&s->delta, &s->delta_size, w * h * s->bpp);
            buffer = s->delta;
            if (!buffer)
                return AVERROR(ENOMEM);

            for (int j = 0; j < h; j++) {
                memcpy(buffer + j * w * s->bpp, e2, w * s->bpp);
                e2 -= s->frame2->linesize[0];
            }

            for (int j = 0; j < h; j++) {
                memcpy(b2, buffer + j * w * s->bpp, w * s->bpp);
                b2 -= s->frame2->linesize[0];
            }
        } else {
            return AVERROR_INVALIDDATA;
        }
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

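/* Step to the next (bottom-up) row once cx has consumed a full row, and count
 * down the current run length. */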
#define NEXT_LINE                        \
    if (cx >= w * s->bpp) {              \
        cx = 0;                          \
        cy--;                            \
        b1 -= s->frame1->linesize[0];    \
        b2 -= s->frame2->linesize[0];    \
    }                                    \
    len--;

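/* DLTA chunk: run-length coded byte/dword updates applied bottom-up inside an
 * x/y/w/h rectangle; most run types save the old frame2 value into frame1
 * before writing the new one. */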
static int decode_dlta(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext dc;
    unsigned uncompressed_size, pos;
    unsigned x, y, w, h;
    int ret, cx, cy, compression;
    uint8_t *b1, *b2;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 12);
    uncompressed_size = bytestream2_get_le32(gb);
    x = bytestream2_get_le32(gb);
    y = bytestream2_get_le32(gb);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);

    if (x >= avctx->width || y >= avctx->height ||
        w > avctx->width || h > avctx->height)
        return AVERROR_INVALIDDATA;

    if (x + w > avctx->width || y + h > avctx->height)
        return AVERROR_INVALIDDATA;

    bytestream2_skip(gb, 4);
    compression = bytestream2_get_le32(gb);

    if (compression == 1) {
        if (w * h * s->bpp * 3 < uncompressed_size)
            return AVERROR_INVALIDDATA;
        ret = decode_zlib(avctx, avpkt, size, uncompressed_size);
        if (ret < 0)
            return ret;
        bytestream2_init(&dc, s->delta, uncompressed_size);
    } else if (compression == 0) {
        if (bytestream2_get_bytes_left(gb) < uncompressed_size)
            return AVERROR_INVALIDDATA;
        bytestream2_init(&dc, avpkt->data + bytestream2_tell(gb),
                         uncompressed_size);
    } else if (compression == 2) {
        avpriv_request_sample(avctx, "compression %d", compression);
        return AVERROR_PATCHWELCOME;
    } else {
        return AVERROR_INVALIDDATA;
    }

    if (!s->frame2->data[0] || !s->frame1->data[0])
        return AVERROR_INVALIDDATA;

    b1 = s->frame1->data[0] + s->frame1->linesize[0] * (y + h - 1) + x * s->bpp;
    b2 = s->frame2->data[0] + s->frame2->linesize[0] * (y + h - 1) + x * s->bpp;
    cx = 0, cy = h;
    while (bytestream2_get_bytes_left(&dc) > 0) {
        int type = bytestream2_get_byte(&dc);
        int len  = bytestream2_get_byte(&dc);
        unsigned fill;

        switch (type) {
        case 1:
            while (len > 0 && cy > 0) {
                cx++;
                NEXT_LINE
            }
            break;
        case 2:
            while (len > 0 && cy > 0) {
                int v0 = b1[cx];
                int v1 = b2[cx];

                b2[cx] = v0;
                b1[cx] = v1;
                cx++;
                NEXT_LINE
            }
            break;
        case 3:
            while (len > 0 && cy > 0) {
                fill = bytestream2_get_byte(&dc);
                b1[cx] = b2[cx];
                b2[cx] = fill;
                cx++;
                NEXT_LINE
            }
            break;
        case 4:
            fill = bytestream2_get_byte(&dc);
            while (len > 0 && cy > 0) {
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx++;
                NEXT_LINE
            }
            break;
        case 7:
            fill = bytestream2_get_le32(&dc);
            while (len > 0 && cy > 0) {
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx += 4;
                NEXT_LINE
            }
            break;
        case 10:
            while (len > 0 && cy > 0) {
                cx += 4;
                NEXT_LINE
            }
            break;
        case 12:
            while (len > 0 && cy > 0) {
                unsigned v0, v1;

                v0 = AV_RL32(b2 + cx);
                v1 = AV_RL32(b1 + cx);
                AV_WL32(b2 + cx, v1);
                AV_WL32(b1 + cx, v0);
                cx += 4;
                NEXT_LINE
            }
            break;
        case 13:
            while (len > 0 && cy > 0) {
                fill = bytestream2_get_le32(&dc);
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx += 4;
                NEXT_LINE
            }
            break;
        default:
            avpriv_request_sample(avctx, "runlen %d", type);
            return AVERROR_INVALIDDATA;
        }
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

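/* KFRM chunk: a zlib-compressed keyframe; rows are stored bottom-up and are
 * inflated one line at a time, first into frame2, then into frame1. */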
static int decode_kfrm(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    uint8_t *dst;
    unsigned pos;
    int zret, ret;

    pos = bytestream2_tell(gb);
    if (bytestream2_peek_le32(gb) == 0x65) {
        ret = decode_fint(avctx, avpkt, size);
        if (ret < 0)
            return ret;
    }

    if (!s->frame2->data[0])
        return AVERROR_INVALIDDATA;

    zret = inflateReset(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate reset error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    s->zstream.next_in  = avpkt->data + bytestream2_tell(gb);
    s->zstream.avail_in = bytestream2_get_bytes_left(gb);

    dst = s->frame2->data[0] + (avctx->height - 1) * s->frame2->linesize[0];
    for (int i = 0; i < avctx->height; i++) {
        s->zstream.next_out  = dst;
        s->zstream.avail_out = s->stride;

        zret = inflate(&s->zstream, Z_SYNC_FLUSH);
        if (zret != Z_OK && zret != Z_STREAM_END) {
            av_log(avctx, AV_LOG_ERROR,
                   "Inflate failed with return code: %d.\n", zret);
            return AVERROR_INVALIDDATA;
        }

        dst -= s->frame2->linesize[0];
    }

    dst = s->frame1->data[0] + (avctx->height - 1) * s->frame1->linesize[0];
    for (int i = 0; i < avctx->height; i++) {
        s->zstream.next_out  = dst;
        s->zstream.avail_out = s->stride;

        zret = inflate(&s->zstream, Z_SYNC_FLUSH);
        if (zret != Z_OK && zret != Z_STREAM_END) {
            av_log(avctx, AV_LOG_ERROR,
                   "Inflate failed with return code: %d.\n", zret);
            return AVERROR_INVALIDDATA;
        }

        dst -= s->frame1->linesize[0];
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

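/* MOUS chunk: a zlib-compressed 24-bit cursor bitmap of w x h pixels. */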
static int decode_mous(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned w, h, pos, uncompressed_size;
    int ret;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 12);
    uncompressed_size = bytestream2_get_le32(gb);

    if (w > avctx->width || h > avctx->height)
        return AVERROR_INVALIDDATA;

    if (uncompressed_size != 3 * w * h)
        return AVERROR_INVALIDDATA;

    av_fast_padded_malloc(&s->cursor, &s->cursor_size, uncompressed_size);
    if (!s->cursor)
        return AVERROR(ENOMEM);

    ret = decode_zlib(avctx, avpkt,
                      size - (bytestream2_tell(gb) - pos),
                      uncompressed_size);
    if (ret < 0)
        return ret;
    memcpy(s->cursor, s->delta, uncompressed_size);

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    s->cursor_w = w;
    s->cursor_h = h;

    return 0;
}

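/* MPOS chunk: updates the stored cursor position. */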
static int decode_mpos(AVCodecContext *avctx,
                       AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned pos;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    s->cursor_x = bytestream2_get_le32(gb);
    s->cursor_y = bytestream2_get_le32(gb);

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

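/* Paint the stored cursor bitmap into the output frame; pixels equal to the
 * first cursor pixel are treated as transparent, and for PAL8 output each
 * cursor pixel is mapped to the closest palette entry. */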
static void draw_cursor(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *dst, *pal;

    if (!s->cursor)
        return;

    if (s->cursor_x >= avctx->width || s->cursor_y >= avctx->height)
        return;

    if (s->cursor_x + s->cursor_w > avctx->width ||
        s->cursor_y + s->cursor_h > avctx->height)
        return;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        pal = s->frame->data[1];
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];
                int best = INT_MAX;
                int index = 0;
                int dist;

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + (s->cursor_x + j);
                for (int k = 0; k < 256; k++) {
                    int pr = pal[k * 4 + 0];
                    int pg = pal[k * 4 + 1];
                    int pb = pal[k * 4 + 2];

                    dist = FFABS(cr - pr) + FFABS(cg - pg) + FFABS(cb - pb);
                    if (dist < best) {
                        best = dist;
                        index = k;
                    }
                }
                dst[0] = index;
            }
        }
    } else if (avctx->pix_fmt == AV_PIX_FMT_RGB555LE) {
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                cr >>= 3; cg >>= 3; cb >>= 3;
                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + 2 * (s->cursor_x + j);
                AV_WL16(dst, cr | cg << 5 | cb << 10);
            }
        }
    } else if (avctx->pix_fmt == AV_PIX_FMT_BGR0) {
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                dst = s->frame->data[0] + s->frame->linesize[0] * (s->cursor_y + i) + 4 * (s->cursor_x + j);
                dst[0] = cb;
                dst[1] = cg;
                dst[2] = cr;
            }
        }
    }
}

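/* A packet is a sequence of chunks, each introduced by a fourcc tag and a
 * 32-bit size; KBND/BNDL wrap the following chunk and flag key vs. inter
 * frames. The reconstructed image lives in frame2 and is copied to the
 * output frame at the end. */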
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame,
                        AVPacket *avpkt)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int ret, intra = 0;
    AVFrame *frame = data;

    bytestream2_init(gb, avpkt->data, avpkt->size);

    if (bytestream2_peek_le32(gb) == EMPT)
        return avpkt->size;

    s->frame = frame;

    while (bytestream2_get_bytes_left(gb) > 0) {
        unsigned type, size = 0;

        if (bytestream2_get_bytes_left(gb) < 8)
            return AVERROR_INVALIDDATA;

        type = bytestream2_get_le32(gb);
        if (type == KBND || type == BNDL) {
            intra = type == KBND;
            type = bytestream2_get_le32(gb);
        }

        size = bytestream2_get_le32(gb);
        if (bytestream2_get_bytes_left(gb) < size)
            return AVERROR_INVALIDDATA;

        switch (type) {
        case FINT:
        case INIT:
            ret = decode_fint(avctx, avpkt, size);
            break;
        case KFRM:
            ret = decode_kfrm(avctx, avpkt, size);
            break;
        case DLTA:
            ret = decode_dlta(avctx, avpkt, size);
            break;
        case MOVE:
            ret = decode_move(avctx, avpkt, size);
            break;
        case MOUS:
            ret = decode_mous(avctx, avpkt, size);
            break;
        case MPOS:
            ret = decode_mpos(avctx, avpkt, size);
            break;
        default:
            bytestream2_skip(gb, size);
        }

        if (ret < 0)
            return ret;
    }

    if (!s->frame2->data[0] || !s->frame1->data[0])
        return AVERROR_INVALIDDATA;

    if ((ret = ff_get_buffer(avctx, s->frame, 0)) < 0)
        return ret;

    copy_plane(avctx, s->frame2, s->frame);
    if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(s->frame->data[1], s->frame2->data[1], 1024);
    if (!s->skip_cursor)
        draw_cursor(avctx);

    s->frame->key_frame = intra;
    s->frame->pict_type = intra ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;

    *got_frame = 1;

    return avpkt->size;
}

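/* Set up the shared zlib inflate state and allocate the reference frames. */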
static av_cold int decode_init(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    int zret;

    s->zstream.zalloc = Z_NULL;
    s->zstream.zfree  = Z_NULL;
    s->zstream.opaque = Z_NULL;
    zret = inflateInit(&s->zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate init error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    s->frame1 = av_frame_alloc();
    s->frame2 = av_frame_alloc();
    if (!s->frame1 || !s->frame2)
        return AVERROR(ENOMEM);

    return 0;
}

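/* Free scratch buffers, reference frames and the zlib state. */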
static av_cold int decode_close(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    av_freep(&s->cursor);
    s->cursor_size = 0;
    av_freep(&s->delta);
    s->delta_size = 0;
    av_frame_free(&s->frame1);
    av_frame_free(&s->frame2);
    inflateEnd(&s->zstream);

    return 0;
}

static void decode_flush(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    clear_plane(avctx, s->frame1);
    clear_plane(avctx, s->frame2);
}

static const AVOption options[] = {
{ "skip_cursor", "skip the cursor", offsetof(RASCContext, skip_cursor), AV_OPT_TYPE_BOOL, {.i64 = 0 }, 0, 1, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM },
{ NULL },
};

static const AVClass rasc_decoder_class = {
    .class_name = "rasc decoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_rasc_decoder = {
    .name           = "rasc",
    .long_name      = NULL_IF_CONFIG_SMALL("RemotelyAnywhere Screen Capture"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_RASC,
    .priv_data_size = sizeof(RASCContext),
    .init           = decode_init,
    .close          = decode_close,
    .decode         = decode_frame,
    .flush          = decode_flush,
    .capabilities   = AV_CODEC_CAP_DR1,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE |
                      FF_CODEC_CAP_INIT_CLEANUP,
    .priv_class     = &rasc_decoder_class,
};